
util.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 31 17:06:22 2020

@author: ljia
"""
import numpy as np
from itertools import combinations
import multiprocessing
from multiprocessing import Pool
from functools import partial
import sys
# from tqdm import tqdm
import networkx as nx
from gklearn.ged.env import GEDEnv
from gklearn.utils import get_iters


def compute_ged(g1, g2, options):
    from gklearn.gedlib import librariesImport, gedlibpy

    ged_env = gedlibpy.GEDEnv()
    ged_env.set_edit_cost(options['edit_cost'], edit_cost_constant=options['edit_cost_constants'])
    ged_env.add_nx_graph(g1, '')
    ged_env.add_nx_graph(g2, '')
    listID = ged_env.get_all_graph_ids()
    ged_env.init(init_type=options['init_option'])
    ged_env.set_method(options['method'], ged_options_to_string(options))
    ged_env.init_method()

    g = listID[0]
    h = listID[1]
    ged_env.run_method(g, h)
    pi_forward = ged_env.get_forward_map(g, h)
    pi_backward = ged_env.get_backward_map(g, h)
    upper = ged_env.get_upper_bound(g, h)
    dis = upper

    # make the map label correct (label remove map as np.inf)
    nodes1 = [n for n in g1.nodes()]
    nodes2 = [n for n in g2.nodes()]
    nb1 = nx.number_of_nodes(g1)
    nb2 = nx.number_of_nodes(g2)
    pi_forward = [nodes2[pi] if pi < nb2 else np.inf for pi in pi_forward]
    pi_backward = [nodes1[pi] if pi < nb1 else np.inf for pi in pi_backward]
    # print(pi_forward)

    return dis, pi_forward, pi_backward


def pairwise_ged(g1, g2, options={}, sort=True, repeats=1, parallel=False, verbose=True):
    from gklearn.gedlib import librariesImport, gedlibpy

    ged_env = gedlibpy.GEDEnv()
    ged_env.set_edit_cost(options['edit_cost'], edit_cost_constant=options['edit_cost_constants'])
    ged_env.add_nx_graph(g1, '')
    ged_env.add_nx_graph(g2, '')
    listID = ged_env.get_all_graph_ids()
    ged_env.init(init_option=(options['init_option'] if 'init_option' in options else 'EAGER_WITHOUT_SHUFFLED_COPIES'))
    ged_env.set_method(options['method'], ged_options_to_string(options))
    ged_env.init_method()

    g = listID[0]
    h = listID[1]
    dis_min = np.inf
    # print('------------------------------------------')
    for i in range(0, repeats):
        ged_env.run_method(g, h)
        upper = ged_env.get_upper_bound(g, h)
        dis = upper
        # print(dis)
        if dis < dis_min:
            dis_min = dis
            pi_forward = ged_env.get_forward_map(g, h)
            pi_backward = ged_env.get_backward_map(g, h)
            # lower = ged_env.get_lower_bound(g, h)

    # make the map label correct (label remove map as np.inf)
    nodes1 = [n for n in g1.nodes()]
    nodes2 = [n for n in g2.nodes()]
    nb1 = nx.number_of_nodes(g1)
    nb2 = nx.number_of_nodes(g2)
    pi_forward = [nodes2[pi] if pi < nb2 else np.inf for pi in pi_forward]
    pi_backward = [nodes1[pi] if pi < nb1 else np.inf for pi in pi_backward]
    # print(pi_forward)

    # Return the best distance over all repeats, consistent with the returned maps.
    return dis_min, pi_forward, pi_backward
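

# The demo below is an illustrative sketch only, not part of the original
# module: the `_demo_pairwise_ged` name is hypothetical, and it assumes the
# compiled `gklearn.gedlib.gedlibpy` extension is installed and that the
# 'CONSTANT' edit cost and 'BIPARTITE' method are available in that build.
def _demo_pairwise_ged():
    """Sketch of a typical `pairwise_ged` call on two small graphs."""
    g1 = nx.path_graph(3)
    g2 = nx.cycle_graph(3)
    options = {
        'edit_cost': 'CONSTANT',
        # node insertion/removal/substitution costs, then the edge ones.
        'edit_cost_constants': [1, 1, 1, 1, 1, 1],
        'method': 'BIPARTITE',
        'threads': 1,
    }
    dis, pi_forward, pi_backward = pairwise_ged(g1, g2, options=options, repeats=3)
    return dis, pi_forward, pi_backward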


def compute_geds_cml(graphs, options={}, sort=True, parallel=False, verbose=True):
    # initialize ged env.
    ged_env = GEDEnv()
    ged_env.set_edit_cost(options['edit_cost'], edit_cost_constants=options['edit_cost_constants'])
    for g in graphs:
        ged_env.add_nx_graph(g, '')
    listID = ged_env.get_all_graph_ids()

    node_labels = ged_env.get_all_node_labels()
    edge_labels = ged_env.get_all_edge_labels()
    node_label_costs = label_costs_to_matrix(options['node_label_costs'], len(node_labels)) if 'node_label_costs' in options else None
    edge_label_costs = label_costs_to_matrix(options['edge_label_costs'], len(edge_labels)) if 'edge_label_costs' in options else None
    ged_env.set_label_costs(node_label_costs, edge_label_costs)
    ged_env.init(init_type=options['init_option'])
    if parallel:
        options['threads'] = 1
    ged_env.set_method(options['method'], options)
    ged_env.init_method()

    # compute ged.
    # options used to compute numbers of edit operations.
    if node_label_costs is None and edge_label_costs is None:
        neo_options = {'edit_cost': options['edit_cost'],
                       'is_cml': False,
                       'node_labels': options['node_labels'], 'edge_labels': options['edge_labels'],
                       'node_attrs': options['node_attrs'], 'edge_attrs': options['edge_attrs']}
    else:
        neo_options = {'edit_cost': options['edit_cost'],
                       'is_cml': True,
                       'node_labels': node_labels,
                       'edge_labels': edge_labels}

    ged_mat = np.zeros((len(graphs), len(graphs)))
    if parallel:
        len_itr = int(len(graphs) * (len(graphs) - 1) / 2)
        ged_vec = [0 for i in range(len_itr)]
        n_edit_operations = [0 for i in range(len_itr)]
        itr = combinations(range(0, len(graphs)), 2)
        n_jobs = multiprocessing.cpu_count()
        if len_itr < 100 * n_jobs:
            chunksize = int(len_itr / n_jobs) + 1
        else:
            chunksize = 100

        def init_worker(graphs_toshare, ged_env_toshare, listID_toshare):
            global G_graphs, G_ged_env, G_listID
            G_graphs = graphs_toshare
            G_ged_env = ged_env_toshare
            G_listID = listID_toshare

        # This function does not expose a `repeats` option, so a single run
        # (repeats=1) is passed to the shared GED helpers.
        do_partial = partial(_wrapper_compute_ged_parallel, neo_options, sort, 1)
        pool = Pool(processes=n_jobs, initializer=init_worker, initargs=(graphs, ged_env, listID))
        iterator = get_iters(pool.imap_unordered(do_partial, itr, chunksize), desc='computing GEDs', file=sys.stdout, length=len_itr, verbose=verbose)
        # iterator = pool.imap_unordered(do_partial, itr, chunksize)
        for i, j, dis, n_eo_tmp in iterator:
            idx_itr = int(len(graphs) * i + j - (i + 1) * (i + 2) / 2)
            ged_vec[idx_itr] = dis
            ged_mat[i][j] = dis
            ged_mat[j][i] = dis
            n_edit_operations[idx_itr] = n_eo_tmp
            # print('\n-------------------------------------------')
            # print(i, j, idx_itr, dis)
        pool.close()
        pool.join()
    else:
        ged_vec = []
        n_edit_operations = []
        iterator = get_iters(range(len(graphs)), desc='computing GEDs', file=sys.stdout, length=len(graphs), verbose=verbose)
        for i in iterator:
            # for i in range(len(graphs)):
            for j in range(i + 1, len(graphs)):
                if nx.number_of_nodes(graphs[i]) <= nx.number_of_nodes(graphs[j]) or not sort:
                    dis, pi_forward, pi_backward = _compute_ged(ged_env, listID[i], listID[j], graphs[i], graphs[j], 1)
                else:
                    dis, pi_backward, pi_forward = _compute_ged(ged_env, listID[j], listID[i], graphs[j], graphs[i], 1)
                ged_vec.append(dis)
                ged_mat[i][j] = dis
                ged_mat[j][i] = dis
                n_eo_tmp = get_nb_edit_operations(graphs[i], graphs[j], pi_forward, pi_backward, **neo_options)
                n_edit_operations.append(n_eo_tmp)

    return ged_vec, ged_mat, n_edit_operations


#%%


def compute_geds(graphs,
                 options={},
                 sort=True,
                 repeats=1,
                 permute_nodes=False,
                 random_state=None,
                 parallel=False,
                 n_jobs=None,
                 verbose=True):
    """Compute graph edit distance matrix using GEDLIB.
    """
    if permute_nodes:
        return _compute_geds_with_permutation(graphs,
                                              options=options,
                                              sort=sort,
                                              repeats=repeats,
                                              random_state=random_state,
                                              parallel=parallel,
                                              n_jobs=n_jobs,
                                              verbose=verbose)
    else:
        return _compute_geds_without_permutation(graphs,
                                                 options=options,
                                                 sort=sort,
                                                 repeats=repeats,
                                                 parallel=parallel,
                                                 n_jobs=n_jobs,
                                                 verbose=verbose)
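

# Illustrative sketch, not part of the original module (`_demo_compute_geds` is
# a hypothetical name): it shows the option keys the downstream helpers expect;
# 'node_labels', 'edge_labels', 'node_attrs' and 'edge_attrs' are required by
# `get_nb_edit_operations`, and the compiled `gklearn.gedlib.gedlibpy`
# extension must be available.
def _demo_compute_geds():
    """Sketch of a typical `compute_geds` call on a small list of graphs."""
    graphs = [nx.path_graph(3), nx.cycle_graph(3), nx.star_graph(3)]
    options = {
        'edit_cost': 'CONSTANT',
        'edit_cost_constants': [1, 1, 1, 1, 1, 1],
        'method': 'BIPARTITE',
        'node_labels': [], 'edge_labels': [],
        'node_attrs': [], 'edge_attrs': [],
        'threads': 1,
    }
    ged_vec, ged_mat, n_edit_operations = compute_geds(graphs, options=options, repeats=1)
    # ged_vec holds the upper triangle of ged_mat, pair by pair (row-major).
    return ged_vec, ged_mat, n_edit_operations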


#%%


def _compute_geds_with_permutation(graphs,
                                   options={},
                                   sort=True,
                                   repeats=1,
                                   random_state=None,
                                   parallel=False,
                                   n_jobs=None,
                                   verbose=True):
    from gklearn.utils.utils import nx_permute_nodes

    # Initialize variables.
    ged_mat_optim = np.full((len(graphs), len(graphs)), np.inf)
    np.fill_diagonal(ged_mat_optim, 0)
    len_itr = int(len(graphs) * (len(graphs) - 1) / 2)
    ged_vec = [0] * len_itr
    n_edit_operations = [0] * len_itr

    # For each repeat:
    for _ in range(0, repeats):
        # Permute nodes.
        graphs_pmut = [nx_permute_nodes(g, random_state=random_state) for g in graphs]

        out = _compute_geds_without_permutation(graphs_pmut,
                                                options=options,
                                                sort=sort,
                                                repeats=1,
                                                parallel=parallel,
                                                n_jobs=n_jobs,
                                                verbose=verbose)

        # Compare current results with the best ones so far.
        idx_cnt = 0
        for i in range(len(graphs)):
            for j in range(i + 1, len(graphs)):
                if out[1][i, j] < ged_mat_optim[i, j]:
                    ged_mat_optim[i, j] = out[1][i, j]
                    ged_mat_optim[j, i] = out[1][j, i]
                    ged_vec[idx_cnt] = out[0][idx_cnt]
                    n_edit_operations[idx_cnt] = out[2][idx_cnt]
                idx_cnt += 1

    return ged_vec, ged_mat_optim, n_edit_operations


def _compute_geds_without_permutation(graphs,
                                      options={},
                                      sort=True,
                                      repeats=1,
                                      parallel=False,
                                      n_jobs=None,
                                      verbose=True):
    from gklearn.gedlib import librariesImport, gedlibpy

    # initialize ged env.
    ged_env = gedlibpy.GEDEnv()
    ged_env.set_edit_cost(options['edit_cost'], edit_cost_constant=options['edit_cost_constants'])
    for g in graphs:
        ged_env.add_nx_graph(g, '')
    listID = ged_env.get_all_graph_ids()
    ged_env.init()
    if parallel:
        options['threads'] = 1
    ged_env.set_method(options['method'], ged_options_to_string(options))
    ged_env.init_method()

    # compute ged.
    neo_options = {'edit_cost': options['edit_cost'],
                   'node_labels': options['node_labels'], 'edge_labels': options['edge_labels'],
                   'node_attrs': options['node_attrs'], 'edge_attrs': options['edge_attrs']}
    ged_mat = np.zeros((len(graphs), len(graphs)))
    if parallel:
        len_itr = int(len(graphs) * (len(graphs) - 1) / 2)
        ged_vec = [0 for i in range(len_itr)]
        n_edit_operations = [0 for i in range(len_itr)]
        itr = combinations(range(0, len(graphs)), 2)
        if n_jobs is None:
            n_jobs = multiprocessing.cpu_count()
        if len_itr < 100 * n_jobs:
            chunksize = int(len_itr / n_jobs) + 1
        else:
            chunksize = 100

        def init_worker(graphs_toshare, ged_env_toshare, listID_toshare):
            global G_graphs, G_ged_env, G_listID
            G_graphs = graphs_toshare
            G_ged_env = ged_env_toshare
            G_listID = listID_toshare

        do_partial = partial(_wrapper_compute_ged_parallel, neo_options, sort, repeats)
        pool = Pool(processes=n_jobs, initializer=init_worker, initargs=(graphs, ged_env, listID))
        iterator = get_iters(pool.imap_unordered(do_partial, itr, chunksize), desc='computing GEDs', file=sys.stdout, length=len_itr, verbose=verbose)
        # iterator = pool.imap_unordered(do_partial, itr, chunksize)
        for i, j, dis, n_eo_tmp in iterator:
            idx_itr = int(len(graphs) * i + j - (i + 1) * (i + 2) / 2)
            ged_vec[idx_itr] = dis
            ged_mat[i][j] = dis
            ged_mat[j][i] = dis
            n_edit_operations[idx_itr] = n_eo_tmp
            # print('\n-------------------------------------------')
            # print(i, j, idx_itr, dis)
        pool.close()
        pool.join()
    else:
        ged_vec = []
        n_edit_operations = []
        iterator = get_iters(range(len(graphs)), desc='computing GEDs', file=sys.stdout, length=len(graphs), verbose=verbose)
        for i in iterator:
            # for i in range(len(graphs)):
            for j in range(i + 1, len(graphs)):
                if nx.number_of_nodes(graphs[i]) <= nx.number_of_nodes(graphs[j]) or not sort:
                    dis, pi_forward, pi_backward = _compute_ged(ged_env, listID[i], listID[j], graphs[i], graphs[j], repeats)
                else:
                    dis, pi_backward, pi_forward = _compute_ged(ged_env, listID[j], listID[i], graphs[j], graphs[i], repeats)
                ged_vec.append(dis)
                ged_mat[i][j] = dis
                ged_mat[j][i] = dis
                n_eo_tmp = get_nb_edit_operations(graphs[i], graphs[j], pi_forward, pi_backward, **neo_options)
                n_edit_operations.append(n_eo_tmp)

    return ged_vec, ged_mat, n_edit_operations


def _wrapper_compute_ged_parallel(options, sort, repeats, itr):
    i = itr[0]
    j = itr[1]
    dis, n_eo_tmp = _compute_ged_parallel(G_ged_env, G_listID[i], G_listID[j], G_graphs[i], G_graphs[j], options, sort, repeats)
    return i, j, dis, n_eo_tmp


def _compute_ged_parallel(env, gid1, gid2, g1, g2, options, sort, repeats):
    if nx.number_of_nodes(g1) <= nx.number_of_nodes(g2) or not sort:
        dis, pi_forward, pi_backward = _compute_ged(env, gid1, gid2, g1, g2, repeats)
    else:
        dis, pi_backward, pi_forward = _compute_ged(env, gid2, gid1, g2, g1, repeats)
    n_eo_tmp = get_nb_edit_operations(g1, g2, pi_forward, pi_backward, **options)  # [0,0,0,0,0,0]
    return dis, n_eo_tmp


def _compute_ged(env, gid1, gid2, g1, g2, repeats):
    dis_min = np.inf  # @todo: maybe compare distance and then do others (faster).
    for i in range(0, repeats):
        env.run_method(gid1, gid2)
        pi_forward = env.get_forward_map(gid1, gid2)
        pi_backward = env.get_backward_map(gid1, gid2)
        upper = env.get_upper_bound(gid1, gid2)
        dis = upper

        # make the map label correct (label remove map as np.inf)
        # Attention: using node indices instead of NetworkX node labels (as
        # implemented here) may cause several issues:
        # - Fail if NetworkX node labels are not consecutive integers;
        # - Return wrong mappings if nodes are permuted (e.g., by using
        #   `gklearn.utils.utils.nx_permute_nodes()`).
        nodes1 = [n for n in g1.nodes()]
        nodes2 = [n for n in g2.nodes()]
        nb1 = nx.number_of_nodes(g1)
        nb2 = nx.number_of_nodes(g2)
        pi_forward = [nodes2[pi] if pi < nb2 else np.inf for pi in pi_forward]
        pi_backward = [nodes1[pi] if pi < nb1 else np.inf for pi in pi_backward]

        if dis < dis_min:
            dis_min = dis
            pi_forward_min = pi_forward
            pi_backward_min = pi_backward
        # print('-----')
        # print(pi_forward_min)
        # print(pi_backward_min)

    return dis_min, pi_forward_min, pi_backward_min


#%%


def get_nb_edit_operations(g1, g2, forward_map, backward_map, edit_cost=None, is_cml=False, **kwargs):
    """Calculate the number of occurrences of each edit operation in a given
    edit path.

    Parameters
    ----------
    g1, g2 : networkx graphs
        The two graphs between which the edit path is computed.
    forward_map : list
        The map from nodes of `g1` to nodes of `g2` (np.inf for removed nodes).
    backward_map : list
        The map from nodes of `g2` to nodes of `g1` (np.inf for inserted nodes).
    edit_cost : string, optional
        The name of the edit cost function. The default is None.
    is_cml : boolean, optional
        Whether costs are given between each pair of labels (label cost
        matrices). The default is False.
    **kwargs :
        Extra options such as `node_labels`, `edge_labels`, `node_attrs` and
        `edge_attrs`, depending on the edit cost function.

    Raises
    ------
    Exception
        If the given edit cost function is not supported.

    Returns
    -------
    tuple or list
        The numbers of times each edit operation occurs; the exact form depends
        on the edit cost function.

    Notes
    -----
    Attention: when implementing a function to get the numbers of edit
    operations, make sure that:

    - It does not fail if NetworkX node labels are not consecutive integers;
    - It returns correct results if nodes are permuted (e.g., by using
      `gklearn.utils.utils.nx_permute_nodes()`).

    Generally speaking, it means you need to distinguish the NetworkX label of
    a node from the position (index) of that node in the node list.
    """
    if is_cml:
        if edit_cost == 'CONSTANT':
            node_labels = kwargs.get('node_labels', [])
            edge_labels = kwargs.get('edge_labels', [])
            return get_nb_edit_operations_symbolic_cml(g1, g2, forward_map, backward_map,
                                                       node_labels=node_labels, edge_labels=edge_labels)
        else:
            raise Exception('Edit cost "{}" is not supported.'.format(edit_cost))
    else:
        if edit_cost == 'LETTER' or edit_cost == 'LETTER2':
            return get_nb_edit_operations_letter(g1, g2, forward_map, backward_map)
        elif edit_cost == 'NON_SYMBOLIC':
            node_attrs = kwargs.get('node_attrs', [])
            edge_attrs = kwargs.get('edge_attrs', [])
            return get_nb_edit_operations_nonsymbolic(g1, g2, forward_map, backward_map,
                                                      node_attrs=node_attrs, edge_attrs=edge_attrs)
        elif edit_cost == 'CONSTANT':
            node_labels = kwargs.get('node_labels', [])
            edge_labels = kwargs.get('edge_labels', [])
            return get_nb_edit_operations_symbolic(g1, g2, forward_map, backward_map,
                                                   node_labels=node_labels, edge_labels=edge_labels)
        else:
            return get_nb_edit_operations_symbolic(g1, g2, forward_map, backward_map)


def get_nb_edit_operations_symbolic_cml(g1, g2, forward_map, backward_map,
                                        node_labels=[], edge_labels=[]):
    """Compute the number of times each edit operation is used in an edit path
    for symbolic-labeled graphs, where the costs are different for each pair of
    labels.

    Returns
    -------
    list
        A vector of the numbers of times that costs between labels are used in
        an edit path, formed in the order of node insertion costs, node
        deletion costs, node substitution costs, edge insertion costs, edge
        deletion costs, edge substitution costs. The dummy label is the first
        label, and the self label costs are not included.
    """
    # Initialize.
    nb_ops_node = np.zeros((1 + len(node_labels), 1 + len(node_labels)))
    nb_ops_edge = np.zeros((1 + len(edge_labels), 1 + len(edge_labels)))

    # For nodes.
    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        label1 = tuple(g1.nodes[nodes1[i]].items())  # @todo: order and faster
        idx_label1 = node_labels.index(label1)  # @todo: faster
        if map_i == np.inf:  # deletions.
            nb_ops_node[idx_label1 + 1, 0] += 1
        else:  # substitutions.
            label2 = tuple(g2.nodes[map_i].items())
            if label1 != label2:
                idx_label2 = node_labels.index(label2)  # @todo: faster
                nb_ops_node[idx_label1 + 1, idx_label2 + 1] += 1
    # insertions.
    nodes2 = [n for n in g2.nodes()]
    for i, map_i in enumerate(backward_map):
        if map_i == np.inf:
            label = tuple(g2.nodes[nodes2[i]].items())
            idx_label = node_labels.index(label)  # @todo: faster
            nb_ops_node[0, idx_label + 1] += 1

    # For edges.
    edges1 = [e for e in g1.edges()]
    edges2_marked = []
    for nf1, nt1 in edges1:
        label1 = tuple(g1.edges[(nf1, nt1)].items())
        idx_label1 = edge_labels.index(label1)  # @todo: faster
        idxf1 = nodes1.index(nf1)  # @todo: faster
        idxt1 = nodes1.index(nt1)  # @todo: faster
        # At least one of the nodes is removed, thus the edge is removed.
        if forward_map[idxf1] == np.inf or forward_map[idxt1] == np.inf:
            nb_ops_edge[idx_label1 + 1, 0] += 1
        # corresponding edge is in g2.
        else:
            nf2, nt2 = forward_map[idxf1], forward_map[idxt1]
            if (nf2, nt2) in g2.edges():
                edges2_marked.append((nf2, nt2))
                # If edge labels are different.
                label2 = tuple(g2.edges[(nf2, nt2)].items())
                if label1 != label2:
                    idx_label2 = edge_labels.index(label2)  # @todo: faster
                    nb_ops_edge[idx_label1 + 1, idx_label2 + 1] += 1
            # Switch nf2 and nt2, for directed graphs.
            elif (nt2, nf2) in g2.edges():
                edges2_marked.append((nt2, nf2))
                # If edge labels are different.
                label2 = tuple(g2.edges[(nt2, nf2)].items())
                if label1 != label2:
                    idx_label2 = edge_labels.index(label2)  # @todo: faster
                    nb_ops_edge[idx_label1 + 1, idx_label2 + 1] += 1
            # Corresponding nodes are in g2, however the edge is removed.
            else:
                nb_ops_edge[idx_label1 + 1, 0] += 1
    # insertions.
    for nt, nf in g2.edges():
        if (nt, nf) not in edges2_marked and (nf, nt) not in edges2_marked:  # @todo: for directed.
            label = tuple(g2.edges[(nt, nf)].items())
            idx_label = edge_labels.index(label)  # @todo: faster
            nb_ops_edge[0, idx_label + 1] += 1

    # Reform the numbers of edit operations into a vector.
    nb_eo_vector = []
    # node insertion.
    for i in range(1, len(nb_ops_node)):
        nb_eo_vector.append(nb_ops_node[0, i])
    # node deletion.
    for i in range(1, len(nb_ops_node)):
        nb_eo_vector.append(nb_ops_node[i, 0])
    # node substitution.
    for i in range(1, len(nb_ops_node)):
        for j in range(i + 1, len(nb_ops_node)):
            nb_eo_vector.append(nb_ops_node[i, j])
    # edge insertion.
    for i in range(1, len(nb_ops_edge)):
        nb_eo_vector.append(nb_ops_edge[0, i])
    # edge deletion.
    for i in range(1, len(nb_ops_edge)):
        nb_eo_vector.append(nb_ops_edge[i, 0])
    # edge substitution.
    for i in range(1, len(nb_ops_edge)):
        for j in range(i + 1, len(nb_ops_edge)):
            nb_eo_vector.append(nb_ops_edge[i, j])

    return nb_eo_vector


def get_nb_edit_operations_symbolic(g1, g2, forward_map, backward_map,
                                    node_labels=[], edge_labels=[]):
    """Compute the number of each edit operation for symbolic-labeled graphs.
    """
    n_vi = 0
    n_vr = 0
    n_vs = 0
    n_ei = 0
    n_er = 0
    n_es = 0

    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        if map_i == np.inf:
            n_vr += 1
        else:
            for nl in node_labels:
                label1 = g1.nodes[nodes1[i]][nl]
                label2 = g2.nodes[map_i][nl]
                if label1 != label2:
                    n_vs += 1
                    break
    for map_i in backward_map:
        if map_i == np.inf:
            n_vi += 1

    # idx_nodes1 = range(0, len(node1))

    edges1 = [e for e in g1.edges()]
    nb_edges2_cnted = 0
    for n1, n2 in edges1:
        idx1 = nodes1.index(n1)
        idx2 = nodes1.index(n2)
        # one of the nodes is removed, thus the edge is removed.
        if forward_map[idx1] == np.inf or forward_map[idx2] == np.inf:
            n_er += 1
        # corresponding edge is in g2.
        elif (forward_map[idx1], forward_map[idx2]) in g2.edges():
            nb_edges2_cnted += 1
            # edge labels are different.
            for el in edge_labels:
                label1 = g2.edges[(forward_map[idx1], forward_map[idx2])][el]
                label2 = g1.edges[(n1, n2)][el]
                if label1 != label2:
                    n_es += 1
                    break
        elif (forward_map[idx2], forward_map[idx1]) in g2.edges():
            nb_edges2_cnted += 1
            # edge labels are different.
            for el in edge_labels:
                label1 = g2.edges[(forward_map[idx2], forward_map[idx1])][el]
                label2 = g1.edges[(n1, n2)][el]
                if label1 != label2:
                    n_es += 1
                    break
        # corresponding nodes are in g2, however the edge is removed.
        else:
            n_er += 1
    n_ei = nx.number_of_edges(g2) - nb_edges2_cnted

    return n_vi, n_vr, n_vs, n_ei, n_er, n_es
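

# Illustrative sketch, not part of the original module (`_demo_count_symbolic_ops`
# is a hypothetical name): it counts edit operations for a hand-made node map.
# `forward_map`/`backward_map` contain g2/g1 node ids (or np.inf for removed /
# inserted nodes), as produced by `_compute_ged`.
def _demo_count_symbolic_ops():
    """Sketch of a `get_nb_edit_operations_symbolic` call."""
    g1 = nx.Graph()
    g1.add_nodes_from([(0, {'atom': 'C'}), (1, {'atom': 'O'})])
    g1.add_edge(0, 1, bond='single')
    g2 = nx.Graph()
    g2.add_nodes_from([(0, {'atom': 'C'}), (1, {'atom': 'N'}), (2, {'atom': 'C'})])
    g2.add_edges_from([(0, 1, {'bond': 'single'}), (1, 2, {'bond': 'double'})])
    # Map g1 node 0 -> g2 node 0 and g1 node 1 -> g2 node 1; g2 node 2 is inserted.
    forward_map = [0, 1]
    backward_map = [0, 1, np.inf]
    return get_nb_edit_operations_symbolic(
        g1, g2, forward_map, backward_map,
        node_labels=['atom'], edge_labels=['bond'])
    # -> (1, 0, 1, 1, 0, 0): one node insertion, one node substitution (O -> N),
    #    one edge insertion, and no removals or edge substitutions.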


def get_nb_edit_operations_letter(g1, g2, forward_map, backward_map):
    """Compute the number of each edit operation.
    """
    n_vi = 0
    n_vr = 0
    n_vs = 0
    sod_vs = 0
    n_ei = 0
    n_er = 0

    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        if map_i == np.inf:
            n_vr += 1
        else:
            n_vs += 1
            diff_x = float(g1.nodes[nodes1[i]]['x']) - float(g2.nodes[map_i]['x'])
            diff_y = float(g1.nodes[nodes1[i]]['y']) - float(g2.nodes[map_i]['y'])
            sod_vs += np.sqrt(np.square(diff_x) + np.square(diff_y))
    for map_i in backward_map:
        if map_i == np.inf:
            n_vi += 1

    # idx_nodes1 = range(0, len(node1))

    edges1 = [e for e in g1.edges()]
    nb_edges2_cnted = 0
    for n1, n2 in edges1:
        idx1 = nodes1.index(n1)
        idx2 = nodes1.index(n2)
        # one of the nodes is removed, thus the edge is removed.
        if forward_map[idx1] == np.inf or forward_map[idx2] == np.inf:
            n_er += 1
        # corresponding edge is in g2. Edge label is not considered.
        elif (forward_map[idx1], forward_map[idx2]) in g2.edges() or \
             (forward_map[idx2], forward_map[idx1]) in g2.edges():
            nb_edges2_cnted += 1
        # corresponding nodes are in g2, however the edge is removed.
        else:
            n_er += 1
    n_ei = nx.number_of_edges(g2) - nb_edges2_cnted

    return n_vi, n_vr, n_vs, sod_vs, n_ei, n_er


def get_nb_edit_operations_nonsymbolic(g1, g2, forward_map, backward_map,
                                       node_attrs=[], edge_attrs=[]):
    """Compute the number of each edit operation.
    """
    n_vi = 0
    n_vr = 0
    n_vs = 0
    sod_vs = 0
    n_ei = 0
    n_er = 0
    n_es = 0
    sod_es = 0

    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        if map_i == np.inf:
            n_vr += 1
        else:
            n_vs += 1
            sum_squares = 0
            for a_name in node_attrs:
                diff = float(g1.nodes[nodes1[i]][a_name]) - float(g2.nodes[map_i][a_name])
                sum_squares += np.square(diff)
            sod_vs += np.sqrt(sum_squares)
    for map_i in backward_map:
        if map_i == np.inf:
            n_vi += 1

    # idx_nodes1 = range(0, len(node1))

    edges1 = [e for e in g1.edges()]
    for n1, n2 in edges1:
        idx1 = nodes1.index(n1)
        idx2 = nodes1.index(n2)
        n1_g2 = forward_map[idx1]
        n2_g2 = forward_map[idx2]
        # one of the nodes is removed, thus the edge is removed.
        if n1_g2 == np.inf or n2_g2 == np.inf:
            n_er += 1
        # corresponding edge is in g2.
        elif (n1_g2, n2_g2) in g2.edges():
            n_es += 1
            sum_squares = 0
            for a_name in edge_attrs:
                diff = float(g1.edges[n1, n2][a_name]) - float(g2.edges[n1_g2, n2_g2][a_name])
                sum_squares += np.square(diff)
            sod_es += np.sqrt(sum_squares)
        elif (n2_g2, n1_g2) in g2.edges():
            n_es += 1
            sum_squares = 0
            for a_name in edge_attrs:
                diff = float(g1.edges[n2, n1][a_name]) - float(g2.edges[n2_g2, n1_g2][a_name])
                sum_squares += np.square(diff)
            sod_es += np.sqrt(sum_squares)
        # corresponding nodes are in g2, however the edge is removed.
        else:
            n_er += 1
    n_ei = nx.number_of_edges(g2) - n_es

    return n_vi, n_vr, sod_vs, n_ei, n_er, sod_es


#%%


def label_costs_to_matrix(costs, nb_labels):
    """Reform a label cost vector into a matrix.

    Parameters
    ----------
    costs : numpy.array
        The vector containing costs between labels, in the order of node
        insertion costs, node deletion costs, node substitution costs, edge
        insertion costs, edge deletion costs, edge substitution costs.
    nb_labels : integer
        Number of labels.

    Returns
    -------
    cost_matrix : numpy.array.
        The reformed label cost matrix of size (nb_labels + 1, nb_labels + 1).
        Each row/column of cost_matrix corresponds to a label, and the first
        label is the dummy label. This is the same setting as in GEDData.
    """
    # Initialize label cost matrix.
    cost_matrix = np.zeros((nb_labels + 1, nb_labels + 1))
    i = 0
    # Costs of insertions.
    for col in range(1, nb_labels + 1):
        cost_matrix[0, col] = costs[i]
        i += 1
    # Costs of deletions.
    for row in range(1, nb_labels + 1):
        cost_matrix[row, 0] = costs[i]
        i += 1
    # Costs of substitutions.
    for row in range(1, nb_labels + 1):
        for col in range(row + 1, nb_labels + 1):
            cost_matrix[row, col] = costs[i]
            cost_matrix[col, row] = costs[i]
            i += 1
    return cost_matrix
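

# Illustrative sketch, not part of the original module (`_demo_label_costs_to_matrix`
# is a hypothetical name): for two labels the cost vector has length 5, laid out
# as [ins(l1), ins(l2), del(l1), del(l2), sub(l1, l2)].
def _demo_label_costs_to_matrix():
    """Sketch showing the vector-to-matrix layout of `label_costs_to_matrix`."""
    costs = np.array([10., 20., 30., 40., 50.])
    mat = label_costs_to_matrix(costs, 2)
    # mat == [[ 0., 10., 20.],   (row/column 0 is the dummy label;
    #         [30.,  0., 50.],    substitution costs are symmetric.)
    #         [40., 50.,  0.]]
    return mat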


#%%


def ged_options_to_string(options):
    opt_str = ' '
    for key, val in options.items():
        if key == 'initialization_method':
            opt_str += '--initialization-method ' + str(val) + ' '
        elif key == 'initialization_options':
            opt_str += '--initialization-options ' + str(val) + ' '
        elif key == 'lower_bound_method':
            opt_str += '--lower-bound-method ' + str(val) + ' '
        elif key == 'random_substitution_ratio':
            opt_str += '--random-substitution-ratio ' + str(val) + ' '
        elif key == 'initial_solutions':
            opt_str += '--initial-solutions ' + str(val) + ' '
        elif key == 'ratio_runs_from_initial_solutions':
            opt_str += '--ratio-runs-from-initial-solutions ' + str(val) + ' '
        elif key == 'threads':
            opt_str += '--threads ' + str(val) + ' '
        elif key == 'num_randpost_loops':
            opt_str += '--num-randpost-loops ' + str(val) + ' '
        elif key == 'max_randpost_retrials':
            opt_str += '--max-randpost-retrials ' + str(val) + ' '
        elif key == 'randpost_penalty':
            opt_str += '--randpost-penalty ' + str(val) + ' '
        elif key == 'randpost_decay':
            opt_str += '--randpost-decay ' + str(val) + ' '
        elif key == 'log':
            opt_str += '--log ' + str(val) + ' '
        elif key == 'randomness':
            opt_str += '--randomness ' + str(val) + ' '
        # if not isinstance(val, list):
        #     opt_str += '--' + key.replace('_', '-') + ' '
        #     if val == False:
        #         val_str = 'FALSE'
        #     else:
        #         val_str = str(val)
        #     opt_str += val_str + ' '
    return opt_str
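

# Illustrative sketch, not part of the original module (`_demo_ged_options_to_string`
# is a hypothetical name): only the keys handled above are translated into
# command-line flags; any other key in the dict is silently ignored.
def _demo_ged_options_to_string():
    """Sketch of a `ged_options_to_string` call."""
    opts = {'threads': 4, 'initial_solutions': 40, 'edit_cost': 'CONSTANT'}
    return ged_options_to_string(opts)
    # -> ' --threads 4 --initial-solutions 40 ' ('edit_cost' has no
    #    corresponding flag here and is therefore dropped).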

A Python package for graph kernels, graph edit distances and graph pre-image problem.