util.py 22 kB

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 31 17:06:22 2020

@author: ljia
"""

import numpy as np
from itertools import combinations
import multiprocessing
from multiprocessing import Pool
from functools import partial
import sys
from tqdm import tqdm
import networkx as nx
from gklearn.ged.env import GEDEnv


def compute_ged(g1, g2, options):
    from gklearn.gedlib import librariesImport, gedlibpy

    ged_env = gedlibpy.GEDEnv()
    ged_env.set_edit_cost(options['edit_cost'], edit_cost_constant=options['edit_cost_constants'])
    ged_env.add_nx_graph(g1, '')
    ged_env.add_nx_graph(g2, '')
    listID = ged_env.get_all_graph_ids()
    ged_env.init(init_type=options['init_option'])
    ged_env.set_method(options['method'], ged_options_to_string(options))
    ged_env.init_method()

    g = listID[0]
    h = listID[1]
    ged_env.run_method(g, h)
    pi_forward = ged_env.get_forward_map(g, h)
    pi_backward = ged_env.get_backward_map(g, h)
    upper = ged_env.get_upper_bound(g, h)
    dis = upper

    # Map node indices back to original node labels (removed nodes map to np.inf).
    nodes1 = [n for n in g1.nodes()]
    nodes2 = [n for n in g2.nodes()]
    nb1 = nx.number_of_nodes(g1)
    nb2 = nx.number_of_nodes(g2)
    pi_forward = [nodes2[pi] if pi < nb2 else np.inf for pi in pi_forward]
    pi_backward = [nodes1[pi] if pi < nb1 else np.inf for pi in pi_backward]
    # print(pi_forward)

    return dis, pi_forward, pi_backward
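
# A minimal usage sketch for compute_ged. The option keys below are the ones
# this function reads; the concrete values are only illustrative assumptions
# (e.g. the 'CONSTANT' edit cost and 'BIPARTITE' method) and must match what
# is available in your gedlibpy build:
#
#     options = {'edit_cost': 'CONSTANT',
#                'edit_cost_constants': [1, 1, 1, 1, 1, 1],
#                'init_option': 'EAGER_WITHOUT_SHUFFLED_COPIES',
#                'method': 'BIPARTITE'}
#     dis, pi_forward, pi_backward = compute_ged(g1, g2, options)  # g1, g2: networkx graphs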


def compute_geds_cml(graphs, options={}, sort=True, parallel=False, verbose=True):
    # initialize ged env.
    ged_env = GEDEnv()
    ged_env.set_edit_cost(options['edit_cost'], edit_cost_constants=options['edit_cost_constants'])
    for g in graphs:
        ged_env.add_nx_graph(g, '')
    listID = ged_env.get_all_graph_ids()

    node_labels = ged_env.get_all_node_labels()
    edge_labels = ged_env.get_all_edge_labels()
    node_label_costs = label_costs_to_matrix(options['node_label_costs'], len(node_labels)) if 'node_label_costs' in options else None
    edge_label_costs = label_costs_to_matrix(options['edge_label_costs'], len(edge_labels)) if 'edge_label_costs' in options else None
    ged_env.set_label_costs(node_label_costs, edge_label_costs)
    ged_env.init(init_type=options['init_option'])
    if parallel:
        options['threads'] = 1
    ged_env.set_method(options['method'], options)
    ged_env.init_method()

    # compute ged.
    # options used to compute numbers of edit operations.
    if node_label_costs is None and edge_label_costs is None:
        neo_options = {'edit_cost': options['edit_cost'],
                       'is_cml': False,
                       'node_labels': options['node_labels'], 'edge_labels': options['edge_labels'],
                       'node_attrs': options['node_attrs'], 'edge_attrs': options['edge_attrs']}
    else:
        neo_options = {'edit_cost': options['edit_cost'],
                       'is_cml': True,
                       'node_labels': node_labels,
                       'edge_labels': edge_labels}

    ged_mat = np.zeros((len(graphs), len(graphs)))
    if parallel:
        len_itr = int(len(graphs) * (len(graphs) - 1) / 2)
        ged_vec = [0 for i in range(len_itr)]
        n_edit_operations = [0 for i in range(len_itr)]
        itr = combinations(range(0, len(graphs)), 2)
        n_jobs = multiprocessing.cpu_count()
        if len_itr < 100 * n_jobs:
            chunksize = int(len_itr / n_jobs) + 1
        else:
            chunksize = 100

        def init_worker(graphs_toshare, ged_env_toshare, listID_toshare):
            global G_graphs, G_ged_env, G_listID
            G_graphs = graphs_toshare
            G_ged_env = ged_env_toshare
            G_listID = listID_toshare

        # A single trial per pair is used here, since compute_geds_cml has no
        # `trial` parameter; the wrapper expects it as its third argument.
        do_partial = partial(_wrapper_compute_ged_parallel, neo_options, sort, 1)
        pool = Pool(processes=n_jobs, initializer=init_worker, initargs=(graphs, ged_env, listID))
        if verbose:
            iterator = tqdm(pool.imap_unordered(do_partial, itr, chunksize),
                            desc='computing GEDs', file=sys.stdout)
        else:
            iterator = pool.imap_unordered(do_partial, itr, chunksize)
        # iterator = pool.imap_unordered(do_partial, itr, chunksize)
        for i, j, dis, n_eo_tmp in iterator:
            # Index of the pair (i, j), i < j, in the condensed (upper-triangle) vector.
            idx_itr = int(len(graphs) * i + j - (i + 1) * (i + 2) / 2)
            ged_vec[idx_itr] = dis
            ged_mat[i][j] = dis
            ged_mat[j][i] = dis
            n_edit_operations[idx_itr] = n_eo_tmp
            # print('\n-------------------------------------------')
            # print(i, j, idx_itr, dis)
        pool.close()
        pool.join()

    else:
        ged_vec = []
        n_edit_operations = []
        if verbose:
            iterator = tqdm(range(len(graphs)), desc='computing GEDs', file=sys.stdout)
        else:
            iterator = range(len(graphs))
        for i in iterator:
            # for i in range(len(graphs)):
            for j in range(i + 1, len(graphs)):
                if nx.number_of_nodes(graphs[i]) <= nx.number_of_nodes(graphs[j]) or not sort:
                    dis, pi_forward, pi_backward = _compute_ged(ged_env, listID[i], listID[j], graphs[i], graphs[j], 1)
                else:
                    dis, pi_backward, pi_forward = _compute_ged(ged_env, listID[j], listID[i], graphs[j], graphs[i], 1)
                ged_vec.append(dis)
                ged_mat[i][j] = dis
                ged_mat[j][i] = dis
                n_eo_tmp = get_nb_edit_operations(graphs[i], graphs[j], pi_forward, pi_backward, **neo_options)
                n_edit_operations.append(n_eo_tmp)

    return ged_vec, ged_mat, n_edit_operations


def compute_geds(graphs, options={}, sort=True, trial=1, parallel=False, verbose=True):
    from gklearn.gedlib import librariesImport, gedlibpy

    # initialize ged env.
    ged_env = gedlibpy.GEDEnv()
    ged_env.set_edit_cost(options['edit_cost'], edit_cost_constant=options['edit_cost_constants'])
    for g in graphs:
        ged_env.add_nx_graph(g, '')
    listID = ged_env.get_all_graph_ids()
    ged_env.init()
    if parallel:
        options['threads'] = 1
    ged_env.set_method(options['method'], ged_options_to_string(options))
    ged_env.init_method()

    # compute ged.
    neo_options = {'edit_cost': options['edit_cost'],
                   'node_labels': options['node_labels'], 'edge_labels': options['edge_labels'],
                   'node_attrs': options['node_attrs'], 'edge_attrs': options['edge_attrs']}
    ged_mat = np.zeros((len(graphs), len(graphs)))
    if parallel:
        len_itr = int(len(graphs) * (len(graphs) - 1) / 2)
        ged_vec = [0 for i in range(len_itr)]
        n_edit_operations = [0 for i in range(len_itr)]
        itr = combinations(range(0, len(graphs)), 2)
        n_jobs = multiprocessing.cpu_count()
        if len_itr < 100 * n_jobs:
            chunksize = int(len_itr / n_jobs) + 1
        else:
            chunksize = 100

        def init_worker(graphs_toshare, ged_env_toshare, listID_toshare):
            global G_graphs, G_ged_env, G_listID
            G_graphs = graphs_toshare
            G_ged_env = ged_env_toshare
            G_listID = listID_toshare

        do_partial = partial(_wrapper_compute_ged_parallel, neo_options, sort, trial)
        pool = Pool(processes=n_jobs, initializer=init_worker, initargs=(graphs, ged_env, listID))
        if verbose:
            iterator = tqdm(pool.imap_unordered(do_partial, itr, chunksize),
                            desc='computing GEDs', file=sys.stdout)
        else:
            iterator = pool.imap_unordered(do_partial, itr, chunksize)
        # iterator = pool.imap_unordered(do_partial, itr, chunksize)
        for i, j, dis, n_eo_tmp in iterator:
            idx_itr = int(len(graphs) * i + j - (i + 1) * (i + 2) / 2)
            ged_vec[idx_itr] = dis
            ged_mat[i][j] = dis
            ged_mat[j][i] = dis
            n_edit_operations[idx_itr] = n_eo_tmp
            # print('\n-------------------------------------------')
            # print(i, j, idx_itr, dis)
        pool.close()
        pool.join()

    else:
        ged_vec = []
        n_edit_operations = []
        if verbose:
            iterator = tqdm(range(len(graphs)), desc='computing GEDs', file=sys.stdout)
        else:
            iterator = range(len(graphs))
        for i in iterator:
            # for i in range(len(graphs)):
            for j in range(i + 1, len(graphs)):
                if nx.number_of_nodes(graphs[i]) <= nx.number_of_nodes(graphs[j]) or not sort:
                    dis, pi_forward, pi_backward = _compute_ged(ged_env, listID[i], listID[j], graphs[i], graphs[j], trial)
                else:
                    dis, pi_backward, pi_forward = _compute_ged(ged_env, listID[j], listID[i], graphs[j], graphs[i], trial)
                ged_vec.append(dis)
                ged_mat[i][j] = dis
                ged_mat[j][i] = dis
                n_eo_tmp = get_nb_edit_operations(graphs[i], graphs[j], pi_forward, pi_backward, **neo_options)
                n_edit_operations.append(n_eo_tmp)

    return ged_vec, ged_mat, n_edit_operations


def _wrapper_compute_ged_parallel(options, sort, trial, itr):
    i = itr[0]
    j = itr[1]
    dis, n_eo_tmp = _compute_ged_parallel(G_ged_env, G_listID[i], G_listID[j], G_graphs[i], G_graphs[j], options, sort, trial)
    return i, j, dis, n_eo_tmp


def _compute_ged_parallel(env, gid1, gid2, g1, g2, options, sort, trial):
    if nx.number_of_nodes(g1) <= nx.number_of_nodes(g2) or not sort:
        dis, pi_forward, pi_backward = _compute_ged(env, gid1, gid2, g1, g2, trial)
    else:
        dis, pi_backward, pi_forward = _compute_ged(env, gid2, gid1, g2, g1, trial)
    n_eo_tmp = get_nb_edit_operations(g1, g2, pi_forward, pi_backward, **options)  # [0,0,0,0,0,0]
    return dis, n_eo_tmp


def _compute_ged(env, gid1, gid2, g1, g2, trial):
    dis_min = np.inf
    for i in range(0, trial):
        env.run_method(gid1, gid2)
        pi_forward = env.get_forward_map(gid1, gid2)
        pi_backward = env.get_backward_map(gid1, gid2)
        upper = env.get_upper_bound(gid1, gid2)
        dis = upper

        # Map node indices back to original node labels (removed nodes map to np.inf).
        nodes1 = [n for n in g1.nodes()]
        nodes2 = [n for n in g2.nodes()]
        nb1 = nx.number_of_nodes(g1)
        nb2 = nx.number_of_nodes(g2)
        pi_forward = [nodes2[pi] if pi < nb2 else np.inf for pi in pi_forward]
        pi_backward = [nodes1[pi] if pi < nb1 else np.inf for pi in pi_backward]

        # Keep the best (smallest) distance over the trials.
        if dis < dis_min:
            dis_min = dis
            pi_forward_min = pi_forward
            pi_backward_min = pi_backward

    return dis_min, pi_forward_min, pi_backward_min


def label_costs_to_matrix(costs, nb_labels):
    """Reform a label cost vector to a matrix.

    Parameters
    ----------
    costs : numpy.array
        The vector containing costs between labels, in the order of node insertion costs, node deletion costs, node substitution costs, edge insertion costs, edge deletion costs, edge substitution costs.
    nb_labels : integer
        Number of labels.

    Returns
    -------
    cost_matrix : numpy.array
        The reformed label cost matrix of size (nb_labels + 1, nb_labels + 1). Each row/column of cost_matrix corresponds to a label, and the first label is the dummy label. This is the same setting as in GEDData.
    """
    # Initialize label cost matrix.
    cost_matrix = np.zeros((nb_labels + 1, nb_labels + 1))
    i = 0
    # Costs of insertions.
    for col in range(1, nb_labels + 1):
        cost_matrix[0, col] = costs[i]
        i += 1
    # Costs of deletions.
    for row in range(1, nb_labels + 1):
        cost_matrix[row, 0] = costs[i]
        i += 1
    # Costs of substitutions.
    for row in range(1, nb_labels + 1):
        for col in range(row + 1, nb_labels + 1):
            cost_matrix[row, col] = costs[i]
            cost_matrix[col, row] = costs[i]
            i += 1
    return cost_matrix
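
# Worked example of the layout built above: for nb_labels = 2 the cost vector
# has 2 (insertions) + 2 (deletions) + 1 (substitution) = 5 entries, e.g.
# costs = [i1, i2, d1, d2, s12], and the resulting 3x3 matrix (row/column 0 is
# the dummy label) is
#     [[0,   i1,  i2 ],
#      [d1,  0,   s12],
#      [d2,  s12, 0  ]]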


def get_nb_edit_operations(g1, g2, forward_map, backward_map, edit_cost=None, is_cml=False, **kwargs):
    if is_cml:
        if edit_cost == 'CONSTANT':
            node_labels = kwargs.get('node_labels', [])
            edge_labels = kwargs.get('edge_labels', [])
            return get_nb_edit_operations_symbolic_cml(g1, g2, forward_map, backward_map,
                                                       node_labels=node_labels, edge_labels=edge_labels)
        else:
            raise Exception('Edit cost "{}" is not supported.'.format(edit_cost))
    else:
        if edit_cost == 'LETTER' or edit_cost == 'LETTER2':
            return get_nb_edit_operations_letter(g1, g2, forward_map, backward_map)
        elif edit_cost == 'NON_SYMBOLIC':
            node_attrs = kwargs.get('node_attrs', [])
            edge_attrs = kwargs.get('edge_attrs', [])
            return get_nb_edit_operations_nonsymbolic(g1, g2, forward_map, backward_map,
                                                      node_attrs=node_attrs, edge_attrs=edge_attrs)
        elif edit_cost == 'CONSTANT':
            node_labels = kwargs.get('node_labels', [])
            edge_labels = kwargs.get('edge_labels', [])
            return get_nb_edit_operations_symbolic(g1, g2, forward_map, backward_map,
                                                   node_labels=node_labels, edge_labels=edge_labels)
        else:
            return get_nb_edit_operations_symbolic(g1, g2, forward_map, backward_map)


def get_nb_edit_operations_symbolic_cml(g1, g2, forward_map, backward_map,
                                        node_labels=[], edge_labels=[]):
    """Compute the number of times each edit operation is used in an edit path for symbolic-labeled graphs, where the costs are different for each pair of labels.

    Returns
    -------
    list
        A vector of the numbers of times that costs between labels are used in an edit path, formed in the order of node insertion costs, node deletion costs, node substitution costs, edge insertion costs, edge deletion costs, edge substitution costs. The dummy label is the first label, and the self label costs are not included.
    """
    # Initialize.
    nb_ops_node = np.zeros((1 + len(node_labels), 1 + len(node_labels)))
    nb_ops_edge = np.zeros((1 + len(edge_labels), 1 + len(edge_labels)))

    # For nodes.
    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        label1 = tuple(g1.nodes[nodes1[i]].items())  # @todo: order and faster
        idx_label1 = node_labels.index(label1)  # @todo: faster
        if map_i == np.inf:  # deletions.
            nb_ops_node[idx_label1 + 1, 0] += 1
        else:  # substitutions.
            label2 = tuple(g2.nodes[map_i].items())
            if label1 != label2:
                idx_label2 = node_labels.index(label2)  # @todo: faster
                nb_ops_node[idx_label1 + 1, idx_label2 + 1] += 1
    # insertions.
    nodes2 = [n for n in g2.nodes()]
    for i, map_i in enumerate(backward_map):
        if map_i == np.inf:
            label = tuple(g2.nodes[nodes2[i]].items())
            idx_label = node_labels.index(label)  # @todo: faster
            nb_ops_node[0, idx_label + 1] += 1

    # For edges.
    edges1 = [e for e in g1.edges()]
    edges2_marked = []
    for nf1, nt1 in edges1:
        label1 = tuple(g1.edges[(nf1, nt1)].items())
        idx_label1 = edge_labels.index(label1)  # @todo: faster
        idxf1 = nodes1.index(nf1)  # @todo: faster
        idxt1 = nodes1.index(nt1)  # @todo: faster
        # At least one of the nodes is removed, thus the edge is removed.
        if forward_map[idxf1] == np.inf or forward_map[idxt1] == np.inf:
            nb_ops_edge[idx_label1 + 1, 0] += 1
        # corresponding edge is in g2.
        else:
            nf2, nt2 = forward_map[idxf1], forward_map[idxt1]
            if (nf2, nt2) in g2.edges():
                edges2_marked.append((nf2, nt2))
                # If edge labels are different.
                label2 = tuple(g2.edges[(nf2, nt2)].items())
                if label1 != label2:
                    idx_label2 = edge_labels.index(label2)  # @todo: faster
                    nb_ops_edge[idx_label1 + 1, idx_label2 + 1] += 1
            # Switch nf2 and nt2, for directed graphs.
            elif (nt2, nf2) in g2.edges():
                edges2_marked.append((nt2, nf2))
                # If edge labels are different.
                label2 = tuple(g2.edges[(nt2, nf2)].items())
                if label1 != label2:
                    idx_label2 = edge_labels.index(label2)  # @todo: faster
                    nb_ops_edge[idx_label1 + 1, idx_label2 + 1] += 1
            # Corresponding nodes are in g2, however the edge is removed.
            else:
                nb_ops_edge[idx_label1 + 1, 0] += 1
    # insertions.
    for nt, nf in g2.edges():
        if (nt, nf) not in edges2_marked and (nf, nt) not in edges2_marked:  # @todo: for directed.
            label = tuple(g2.edges[(nt, nf)].items())
            idx_label = edge_labels.index(label)  # @todo: faster
            nb_ops_edge[0, idx_label + 1] += 1

    # Reform the numbers of edit operations into a vector.
    nb_eo_vector = []
    # node insertion.
    for i in range(1, len(nb_ops_node)):
        nb_eo_vector.append(nb_ops_node[0, i])
    # node deletion.
    for i in range(1, len(nb_ops_node)):
        nb_eo_vector.append(nb_ops_node[i, 0])
    # node substitution.
    for i in range(1, len(nb_ops_node)):
        for j in range(i + 1, len(nb_ops_node)):
            nb_eo_vector.append(nb_ops_node[i, j])
    # edge insertion.
    for i in range(1, len(nb_ops_edge)):
        nb_eo_vector.append(nb_ops_edge[0, i])
    # edge deletion.
    for i in range(1, len(nb_ops_edge)):
        nb_eo_vector.append(nb_ops_edge[i, 0])
    # edge substitution.
    for i in range(1, len(nb_ops_edge)):
        for j in range(i + 1, len(nb_ops_edge)):
            nb_eo_vector.append(nb_ops_edge[i, j])

    return nb_eo_vector
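
# Note on the vector returned above: with L node labels and K edge labels it
# has L + L + L*(L-1)/2 entries for node insertions, deletions and
# substitutions, followed by K + K + K*(K-1)/2 entries for the corresponding
# edge operations. Each block follows the same per-label-set layout as the
# cost vector consumed by label_costs_to_matrix.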


def get_nb_edit_operations_symbolic(g1, g2, forward_map, backward_map,
                                    node_labels=[], edge_labels=[]):
    """Compute the number of each type of edit operation for symbolic-labeled graphs.
    """
    n_vi = 0
    n_vr = 0
    n_vs = 0
    n_ei = 0
    n_er = 0
    n_es = 0

    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        if map_i == np.inf:
            n_vr += 1
        else:
            for nl in node_labels:
                label1 = g1.nodes[nodes1[i]][nl]
                label2 = g2.nodes[map_i][nl]
                if label1 != label2:
                    n_vs += 1
                    break
    for map_i in backward_map:
        if map_i == np.inf:
            n_vi += 1

    # idx_nodes1 = range(0, len(node1))

    edges1 = [e for e in g1.edges()]
    nb_edges2_cnted = 0
    for n1, n2 in edges1:
        idx1 = nodes1.index(n1)
        idx2 = nodes1.index(n2)
        # one of the nodes is removed, thus the edge is removed.
        if forward_map[idx1] == np.inf or forward_map[idx2] == np.inf:
            n_er += 1
        # corresponding edge is in g2.
        elif (forward_map[idx1], forward_map[idx2]) in g2.edges():
            nb_edges2_cnted += 1
            # edge labels are different.
            for el in edge_labels:
                label1 = g2.edges[((forward_map[idx1], forward_map[idx2]))][el]
                label2 = g1.edges[(n1, n2)][el]
                if label1 != label2:
                    n_es += 1
                    break
        elif (forward_map[idx2], forward_map[idx1]) in g2.edges():
            nb_edges2_cnted += 1
            # edge labels are different.
            for el in edge_labels:
                label1 = g2.edges[((forward_map[idx2], forward_map[idx1]))][el]
                label2 = g1.edges[(n1, n2)][el]
                if label1 != label2:
                    n_es += 1
                    break
        # corresponding nodes are in g2, however the edge is removed.
        else:
            n_er += 1
    n_ei = nx.number_of_edges(g2) - nb_edges2_cnted

    return n_vi, n_vr, n_vs, n_ei, n_er, n_es


def get_nb_edit_operations_letter(g1, g2, forward_map, backward_map):
    """Compute the number of each type of edit operation.
    """
    n_vi = 0
    n_vr = 0
    n_vs = 0
    sod_vs = 0
    n_ei = 0
    n_er = 0

    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        if map_i == np.inf:
            n_vr += 1
        else:
            n_vs += 1
            diff_x = float(g1.nodes[nodes1[i]]['x']) - float(g2.nodes[map_i]['x'])
            diff_y = float(g1.nodes[nodes1[i]]['y']) - float(g2.nodes[map_i]['y'])
            sod_vs += np.sqrt(np.square(diff_x) + np.square(diff_y))
    for map_i in backward_map:
        if map_i == np.inf:
            n_vi += 1

    # idx_nodes1 = range(0, len(node1))

    edges1 = [e for e in g1.edges()]
    nb_edges2_cnted = 0
    for n1, n2 in edges1:
        idx1 = nodes1.index(n1)
        idx2 = nodes1.index(n2)
        # one of the nodes is removed, thus the edge is removed.
        if forward_map[idx1] == np.inf or forward_map[idx2] == np.inf:
            n_er += 1
        # corresponding edge is in g2. Edge label is not considered.
        elif (forward_map[idx1], forward_map[idx2]) in g2.edges() or \
                (forward_map[idx2], forward_map[idx1]) in g2.edges():
            nb_edges2_cnted += 1
        # corresponding nodes are in g2, however the edge is removed.
        else:
            n_er += 1
    n_ei = nx.number_of_edges(g2) - nb_edges2_cnted

    return n_vi, n_vr, n_vs, sod_vs, n_ei, n_er


def get_nb_edit_operations_nonsymbolic(g1, g2, forward_map, backward_map,
                                       node_attrs=[], edge_attrs=[]):
    """Compute the number of each type of edit operation.
    """
    n_vi = 0
    n_vr = 0
    n_vs = 0
    sod_vs = 0
    n_ei = 0
    n_er = 0
    n_es = 0
    sod_es = 0

    nodes1 = [n for n in g1.nodes()]
    for i, map_i in enumerate(forward_map):
        if map_i == np.inf:
            n_vr += 1
        else:
            n_vs += 1
            sum_squares = 0
            for a_name in node_attrs:
                diff = float(g1.nodes[nodes1[i]][a_name]) - float(g2.nodes[map_i][a_name])
                sum_squares += np.square(diff)
            sod_vs += np.sqrt(sum_squares)
    for map_i in backward_map:
        if map_i == np.inf:
            n_vi += 1

    # idx_nodes1 = range(0, len(node1))

    edges1 = [e for e in g1.edges()]
    for n1, n2 in edges1:
        idx1 = nodes1.index(n1)
        idx2 = nodes1.index(n2)
        n1_g2 = forward_map[idx1]
        n2_g2 = forward_map[idx2]
        # one of the nodes is removed, thus the edge is removed.
        if n1_g2 == np.inf or n2_g2 == np.inf:
            n_er += 1
        # corresponding edge is in g2.
        elif (n1_g2, n2_g2) in g2.edges():
            n_es += 1
            sum_squares = 0
            for a_name in edge_attrs:
                diff = float(g1.edges[n1, n2][a_name]) - float(g2.edges[n1_g2, n2_g2][a_name])
                sum_squares += np.square(diff)
            sod_es += np.sqrt(sum_squares)
        elif (n2_g2, n1_g2) in g2.edges():
            n_es += 1
            sum_squares = 0
            for a_name in edge_attrs:
                diff = float(g1.edges[n2, n1][a_name]) - float(g2.edges[n2_g2, n1_g2][a_name])
                sum_squares += np.square(diff)
            sod_es += np.sqrt(sum_squares)
        # corresponding nodes are in g2, however the edge is removed.
        else:
            n_er += 1
    n_ei = nx.number_of_edges(g2) - n_es

    return n_vi, n_vr, sod_vs, n_ei, n_er, sod_es


def ged_options_to_string(options):
    opt_str = ' '
    for key, val in options.items():
        if key == 'initialization_method':
            opt_str += '--initialization-method ' + str(val) + ' '
        elif key == 'initialization_options':
            opt_str += '--initialization-options ' + str(val) + ' '
        elif key == 'lower_bound_method':
            opt_str += '--lower-bound-method ' + str(val) + ' '
        elif key == 'random_substitution_ratio':
            opt_str += '--random-substitution-ratio ' + str(val) + ' '
        elif key == 'initial_solutions':
            opt_str += '--initial-solutions ' + str(val) + ' '
        elif key == 'ratio_runs_from_initial_solutions':
            opt_str += '--ratio-runs-from-initial-solutions ' + str(val) + ' '
        elif key == 'threads':
            opt_str += '--threads ' + str(val) + ' '
        elif key == 'num_randpost_loops':
            opt_str += '--num-randpost-loops ' + str(val) + ' '
        elif key == 'max_randpost_retrials':
            opt_str += '--maxrandpost-retrials ' + str(val) + ' '
        elif key == 'randpost_penalty':
            opt_str += '--randpost-penalty ' + str(val) + ' '
        elif key == 'randpost_decay':
            opt_str += '--randpost-decay ' + str(val) + ' '
        elif key == 'log':
            opt_str += '--log ' + str(val) + ' '
        elif key == 'randomness':
            opt_str += '--randomness ' + str(val) + ' '
        # if not isinstance(val, list):
        #     opt_str += '--' + key.replace('_', '-') + ' '
        #     if val == False:
        #         val_str = 'FALSE'
        #     else:
        #         val_str = str(val)
        #     opt_str += val_str + ' '
    return opt_str
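

if __name__ == '__main__':
    # A small self-contained sketch exercising the pure-Python helpers above.
    # The values are illustrative only; no GEDLIB environment is required here.
    example_costs = [1, 2, 3, 4, 5]  # 2 insertions, 2 deletions, 1 substitution (nb_labels = 2)
    print(label_costs_to_matrix(example_costs, 2))
    print(ged_options_to_string({'threads': 4, 'initial_solutions': 40,
                                 'ratio_runs_from_initial_solutions': 1}))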

A Python package for graph kernels, graph edit distances, and the graph pre-image problem.