
median_graph_estimator.py 53 kB

  1. #!/usr/bin/env python3
  2. # -*- coding: utf-8 -*-
  3. """
  4. Created on Mon Mar 16 18:04:55 2020
  5. @author: ljia
  6. """
  7. import numpy as np
  8. from gklearn.ged.env import AlgorithmState, NodeMap
  9. from gklearn.ged.util import misc
  10. from gklearn.utils import Timer
  11. import time
  12. from tqdm import tqdm
  13. import sys
  14. import networkx as nx
  15. class MedianGraphEstimator(object):
  16. def __init__(self, ged_env, constant_node_costs):
  17. """Constructor.
  18. Parameters
  19. ----------
  20. ged_env : gklearn.gedlib.gedlibpy.GEDEnv
  21. Initialized GED environment. The edit costs must be set by the user.
  22. constant_node_costs : Boolean
  23. Set to True if the node relabeling costs are constant.
  24. """
  25. self.__ged_env = ged_env
  26. self.__init_method = 'BRANCH_FAST'
  27. self.__init_options = ''
  28. self.__descent_method = 'BRANCH_FAST'
  29. self.__descent_options = ''
  30. self.__refine_method = 'IPFP'
  31. self.__refine_options = ''
  32. self.__constant_node_costs = constant_node_costs
  33. self.__labeled_nodes = (ged_env.get_num_node_labels() > 1)
  34. self.__node_del_cost = ged_env.get_node_del_cost(ged_env.get_node_label(1))
  35. self.__node_ins_cost = ged_env.get_node_ins_cost(ged_env.get_node_label(1))
  36. self.__labeled_edges = (ged_env.get_num_edge_labels() > 1)
  37. self.__edge_del_cost = ged_env.get_edge_del_cost(ged_env.get_edge_label(1))
  38. self.__edge_ins_cost = ged_env.get_edge_ins_cost(ged_env.get_edge_label(1))
  39. self.__init_type = 'RANDOM'
  40. self.__num_random_inits = 10
  41. self.__desired_num_random_inits = 10
  42. self.__use_real_randomness = True
  43. self.__seed = 0
  44. self.__update_order = True
  45. self.__refine = True
  46. self.__time_limit_in_sec = 0
  47. self.__epsilon = 0.0001
  48. self.__max_itrs = 100
  49. self.__max_itrs_without_update = 3
  50. self.__num_inits_increase_order = 10
  51. self.__init_type_increase_order = 'K-MEANS++'
  52. self.__max_itrs_increase_order = 10
  53. self.__print_to_stdout = 2
  54. self.__median_id = np.inf # @todo: check
  55. self.__median_node_id_prefix = '' # @todo: check
  56. self.__node_maps_from_median = {}
  57. self.__sum_of_distances = 0
  58. self.__best_init_sum_of_distances = np.inf
  59. self.__converged_sum_of_distances = np.inf
  60. self.__runtime = None
  61. self.__runtime_initialized = None
  62. self.__runtime_converged = None
  63. self.__itrs = [] # @todo: check: {} ?
  64. self.__num_decrease_order = 0
  65. self.__num_increase_order = 0
  66. self.__num_converged_descents = 0
  67. self.__state = AlgorithmState.TERMINATED
  68. self.__label_names = {}
  69. if ged_env is None:
  70. raise Exception('The GED environment pointer passed to the constructor of MedianGraphEstimator is null.')
  71. elif not ged_env.is_initialized():
  72. raise Exception('The GED environment is uninitialized. Call gedlibpy.GEDEnv.init() before passing it to the constructor of MedianGraphEstimator.')
  73. def set_options(self, options):
  74. """Sets the options of the estimator.
  75. Parameters
  76. ----------
  77. options : string
  78. String that specifies with which options to run the estimator.
  79. """
  80. self.__set_default_options()
  81. options_map = misc.options_string_to_options_map(options)
  82. for opt_name, opt_val in options_map.items():
  83. if opt_name == 'init-type':
  84. self.__init_type = opt_val
  85. if opt_val != 'MEDOID' and opt_val != 'RANDOM' and opt_val != 'MIN' and opt_val != 'MAX' and opt_val != 'MEAN':
86. raise Exception('Invalid argument ' + opt_val + ' for option init-type. Usage: options = "[--init-type RANDOM|MEDOID|MIN|MAX|MEAN] [...]"')
  87. elif opt_name == 'random-inits':
  88. try:
  89. self.__num_random_inits = int(opt_val)
  90. self.__desired_num_random_inits = self.__num_random_inits
  91. except:
  92. raise Exception('Invalid argument "' + opt_val + '" for option random-inits. Usage: options = "[--random-inits <convertible to int greater 0>]"')
  93. if self.__num_random_inits <= 0:
  94. raise Exception('Invalid argument "' + opt_val + '" for option random-inits. Usage: options = "[--random-inits <convertible to int greater 0>]"')
  95. elif opt_name == 'randomness':
  96. if opt_val == 'PSEUDO':
  97. self.__use_real_randomness = False
  98. elif opt_val == 'REAL':
  99. self.__use_real_randomness = True
  100. else:
  101. raise Exception('Invalid argument "' + opt_val + '" for option randomness. Usage: options = "[--randomness REAL|PSEUDO] [...]"')
  102. elif opt_name == 'stdout':
  103. if opt_val == '0':
  104. self.__print_to_stdout = 0
  105. elif opt_val == '1':
  106. self.__print_to_stdout = 1
  107. elif opt_val == '2':
  108. self.__print_to_stdout = 2
  109. else:
  110. raise Exception('Invalid argument "' + opt_val + '" for option stdout. Usage: options = "[--stdout 0|1|2] [...]"')
  111. elif opt_name == 'update-order':
  112. if opt_val == 'TRUE':
  113. self.__update_order = True
  114. elif opt_val == 'FALSE':
  115. self.__update_order = False
  116. else:
  117. raise Exception('Invalid argument "' + opt_val + '" for option update-order. Usage: options = "[--update-order TRUE|FALSE] [...]"')
  118. elif opt_name == 'refine':
  119. if opt_val == 'TRUE':
  120. self.__refine = True
  121. elif opt_val == 'FALSE':
  122. self.__refine = False
  123. else:
  124. raise Exception('Invalid argument "' + opt_val + '" for option refine. Usage: options = "[--refine TRUE|FALSE] [...]"')
  125. elif opt_name == 'time-limit':
  126. try:
  127. self.__time_limit_in_sec = float(opt_val)
  128. except:
  129. raise Exception('Invalid argument "' + opt_val + '" for option time-limit. Usage: options = "[--time-limit <convertible to double>] [...]')
  130. elif opt_name == 'max-itrs':
  131. try:
  132. self.__max_itrs = int(opt_val)
  133. except:
  134. raise Exception('Invalid argument "' + opt_val + '" for option max-itrs. Usage: options = "[--max-itrs <convertible to int>] [...]')
  135. elif opt_name == 'max-itrs-without-update':
  136. try:
  137. self.__max_itrs_without_update = int(opt_val)
  138. except:
  139. raise Exception('Invalid argument "' + opt_val + '" for option max-itrs-without-update. Usage: options = "[--max-itrs-without-update <convertible to int>] [...]')
  140. elif opt_name == 'seed':
  141. try:
  142. self.__seed = int(opt_val)
  143. except:
  144. raise Exception('Invalid argument "' + opt_val + '" for option seed. Usage: options = "[--seed <convertible to int greater equal 0>] [...]')
  145. elif opt_name == 'epsilon':
  146. try:
  147. self.__epsilon = float(opt_val)
  148. except:
  149. raise Exception('Invalid argument "' + opt_val + '" for option epsilon. Usage: options = "[--epsilon <convertible to double greater 0>] [...]')
  150. if self.__epsilon <= 0:
  151. raise Exception('Invalid argument "' + opt_val + '" for option epsilon. Usage: options = "[--epsilon <convertible to double greater 0>] [...]')
  152. elif opt_name == 'inits-increase-order':
  153. try:
  154. self.__num_inits_increase_order = int(opt_val)
  155. except:
  156. raise Exception('Invalid argument "' + opt_val + '" for option inits-increase-order. Usage: options = "[--inits-increase-order <convertible to int greater 0>]"')
  157. if self.__num_inits_increase_order <= 0:
  158. raise Exception('Invalid argument "' + opt_val + '" for option inits-increase-order. Usage: options = "[--inits-increase-order <convertible to int greater 0>]"')
  159. elif opt_name == 'init-type-increase-order':
  160. self.__init_type_increase_order = opt_val
  161. if opt_val != 'CLUSTERS' and opt_val != 'K-MEANS++':
  162. raise Exception('Invalid argument ' + opt_val + ' for option init-type-increase-order. Usage: options = "[--init-type-increase-order CLUSTERS|K-MEANS++] [...]"')
  163. elif opt_name == 'max-itrs-increase-order':
  164. try:
  165. self.__max_itrs_increase_order = int(opt_val)
  166. except:
  167. raise Exception('Invalid argument "' + opt_val + '" for option max-itrs-increase-order. Usage: options = "[--max-itrs-increase-order <convertible to int>] [...]')
  168. else:
  169. valid_options = '[--init-type <arg>] [--random-inits <arg>] [--randomness <arg>] [--seed <arg>] [--stdout <arg>] '
170. valid_options += '[--update-order <arg>] [--refine <arg>] [--time-limit <arg>] [--max-itrs <arg>] [--max-itrs-without-update <arg>] [--epsilon <arg>] '
  171. valid_options += '[--inits-increase-order <arg>] [--init-type-increase-order <arg>] [--max-itrs-increase-order <arg>]'
  172. raise Exception('Invalid option "' + opt_name + '". Usage: options = "' + valid_options + '"')
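# Example (illustrative only, not part of the original file): the options string
# parsed above is a space-separated sequence of "--name value" pairs, each of
# which corresponds to one of the branches handled in set_options(), e.g.
#     estimator.set_options('--init-type MEDOID --random-inits 5 --randomness PSEUDO '
#                           '--seed 42 --stdout 1 --time-limit 600 --max-itrs 50 --epsilon 0.0001')
# Here "estimator" is a placeholder for a MedianGraphEstimator instance.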
  173. def set_init_method(self, init_method, init_options=''):
  174. """Selects method to be used for computing the initial medoid graph.
  175. Parameters
  176. ----------
  177. init_method : string
178. The selected method. Default: "BRANCH_FAST".
  179. init_options : string
  180. The options for the selected method. Default: "".
  181. Notes
  182. -----
  183. Has no effect unless "--init-type MEDOID" is passed to set_options().
  184. """
  185. self.__init_method = init_method;
  186. self.__init_options = init_options;
  187. def set_descent_method(self, descent_method, descent_options=''):
  188. """Selects method to be used for block gradient descent..
  189. Parameters
  190. ----------
  191. descent_method : string
192. The selected method. Default: "BRANCH_FAST".
  193. descent_options : string
  194. The options for the selected method. Default: "".
  195. Notes
  196. -----
197. This method is used for the main block gradient descent, regardless of the options passed to set_options().
  198. """
  199. self.__descent_method = descent_method;
  200. self.__descent_options = descent_options;
201. def set_refine_method(self, refine_method, refine_options=''):
  202. """Selects method to be used for improving the sum of distances and the node maps for the converged median.
  203. Parameters
  204. ----------
  205. refine_method : string
  206. The selected method. Default: "IPFP".
  207. refine_options : string
  208. The options for the selected method. Default: "".
  209. Notes
  210. -----
  211. Has no effect if "--refine FALSE" is passed to set_options().
  212. """
  213. self.__refine_method = refine_method
  214. self.__refine_options = refine_options
  215. def run(self, graph_ids, set_median_id, gen_median_id):
  216. """Computes a generalized median graph.
  217. Parameters
  218. ----------
  219. graph_ids : list[integer]
  220. The IDs of the graphs for which the median should be computed. Must have been added to the environment passed to the constructor.
  221. set_median_id : integer
  222. The ID of the computed set-median. A dummy graph with this ID must have been added to the environment passed to the constructor. Upon termination, the computed median can be obtained via gklearn.gedlib.gedlibpy.GEDEnv.get_graph().
  223. gen_median_id : integer
  224. The ID of the computed generalized median. Upon termination, the computed median can be obtained via gklearn.gedlib.gedlibpy.GEDEnv.get_graph().
  225. """
  226. # Sanity checks.
  227. if len(graph_ids) == 0:
  228. raise Exception('Empty vector of graph IDs, unable to compute median.')
  229. all_graphs_empty = True
  230. for graph_id in graph_ids:
  231. if self.__ged_env.get_graph_num_nodes(graph_id) > 0:
  232. self.__median_node_id_prefix = self.__ged_env.get_original_node_ids(graph_id)[0]
  233. all_graphs_empty = False
  234. break
  235. if all_graphs_empty:
  236. raise Exception('All graphs in the collection are empty.')
  237. # Start timer and record start time.
  238. start = time.time()
  239. timer = Timer(self.__time_limit_in_sec)
  240. self.__median_id = gen_median_id
  241. self.__state = AlgorithmState.TERMINATED
  242. # Get ExchangeGraph representations of the input graphs.
  243. graphs = {}
  244. for graph_id in graph_ids:
  245. # @todo: get_nx_graph() function may need to be modified according to the coming code.
  246. graphs[graph_id] = self.__ged_env.get_nx_graph(graph_id, True, True, False)
  247. # print(self.__ged_env.get_graph_internal_id(0))
  248. # print(graphs[0].graph)
  249. # print(graphs[0].nodes(data=True))
  250. # print(graphs[0].edges(data=True))
  251. # print(nx.adjacency_matrix(graphs[0]))
  252. # Construct initial medians.
  253. medians = []
  254. self.__construct_initial_medians(graph_ids, timer, medians)
  255. end_init = time.time()
  256. self.__runtime_initialized = end_init - start
  257. # print(medians[0].graph)
  258. # print(medians[0].nodes(data=True))
  259. # print(medians[0].edges(data=True))
  260. # print(nx.adjacency_matrix(medians[0]))
  261. # Reset information about iterations and number of times the median decreases and increases.
  262. self.__itrs = [0] * len(medians)
  263. self.__num_decrease_order = 0
  264. self.__num_increase_order = 0
  265. self.__num_converged_descents = 0
  266. # Initialize the best median.
  267. best_sum_of_distances = np.inf
  268. self.__best_init_sum_of_distances = np.inf
  269. node_maps_from_best_median = {}
  270. # Run block gradient descent from all initial medians.
  271. self.__ged_env.set_method(self.__descent_method, self.__descent_options)
  272. for median_pos in range(0, len(medians)):
  273. # Terminate if the timer has expired and at least one SOD has been computed.
  274. if timer.expired() and median_pos > 0:
  275. break
  276. # Print information about current iteration.
  277. if self.__print_to_stdout == 2:
  278. print('\n===========================================================')
  279. print('Block gradient descent for initial median', str(median_pos + 1), 'of', str(len(medians)), '.')
  280. print('-----------------------------------------------------------')
  281. # Get reference to the median.
  282. median = medians[median_pos]
  283. # Load initial median into the environment.
  284. self.__ged_env.load_nx_graph(median, gen_median_id)
  285. self.__ged_env.init(self.__ged_env.get_init_type())
  286. # Print information about current iteration.
  287. if self.__print_to_stdout == 2:
  288. progress = tqdm(desc='Computing initial node maps', total=len(graph_ids), file=sys.stdout)
  289. # Compute node maps and sum of distances for initial median.
  290. self.__sum_of_distances = 0
  291. self.__node_maps_from_median.clear()
  292. for graph_id in graph_ids:
  293. self.__ged_env.run_method(gen_median_id, graph_id)
  294. self.__node_maps_from_median[graph_id] = self.__ged_env.get_node_map(gen_median_id, graph_id)
  295. # print(self.__node_maps_from_median[graph_id])
  296. self.__sum_of_distances += self.__node_maps_from_median[graph_id].induced_cost()
  297. # print(self.__sum_of_distances)
  298. # Print information about current iteration.
  299. if self.__print_to_stdout == 2:
  300. progress.update(1)
  301. self.__best_init_sum_of_distances = min(self.__best_init_sum_of_distances, self.__sum_of_distances)
  302. self.__ged_env.load_nx_graph(median, set_median_id)
  303. print(self.__best_init_sum_of_distances)
  304. # Print information about current iteration.
  305. if self.__print_to_stdout == 2:
  306. print('\n')
  307. # Run block gradient descent from initial median.
  308. converged = False
  309. itrs_without_update = 0
  310. while not self.__termination_criterion_met(converged, timer, self.__itrs[median_pos], itrs_without_update):
  311. # Print information about current iteration.
  312. if self.__print_to_stdout == 2:
  313. print('\n===========================================================')
  314. print('Iteration', str(self.__itrs[median_pos] + 1), 'for initial median', str(median_pos + 1), 'of', str(len(medians)), '.')
  315. print('-----------------------------------------------------------')
  316. # Initialize flags that tell us what happened in the iteration.
  317. median_modified = False
  318. node_maps_modified = False
  319. decreased_order = False
  320. increased_order = False
  321. # Update the median. # @todo!!!!!!!!!!!!!!!!!!!!!!
  322. median_modified = self.__update_median(graphs, median)
  323. if self.__update_order:
  324. if not median_modified or self.__itrs[median_pos] == 0:
  325. decreased_order = self.__decrease_order(graphs, median)
  326. if not decreased_order or self.__itrs[median_pos] == 0:
  327. increased_order = self.__increase_order(graphs, median)
  328. # Update the number of iterations without update of the median.
  329. if median_modified or decreased_order or increased_order:
  330. itrs_without_update = 0
  331. else:
  332. itrs_without_update += 1
  333. # Print information about current iteration.
  334. if self.__print_to_stdout == 2:
  335. print('Loading median to environment: ... ', end='')
  336. # Load the median into the environment.
  337. # @todo: should this function use the original node label?
  338. self.__ged_env.load_nx_graph(median, gen_median_id)
  339. self.__ged_env.init(self.__ged_env.get_init_type())
  340. # Print information about current iteration.
  341. if self.__print_to_stdout == 2:
  342. print('done.')
  343. # Print information about current iteration.
  344. if self.__print_to_stdout == 2:
  345. print('Updating induced costs: ... ', end='')
  346. # Compute induced costs of the old node maps w.r.t. the updated median.
  347. for graph_id in graph_ids:
  348. # print(self.__node_maps_from_median[graph_id].induced_cost())
  349. self.__ged_env.compute_induced_cost(gen_median_id, graph_id, self.__node_maps_from_median[graph_id])
  350. # print('---------------------------------------')
  351. # print(self.__node_maps_from_median[graph_id].induced_cost())
352. # @todo: !!!!!!!!!!!!!!!!!!!!!!!!!!!! This value is slightly different from the C++ program, which might be a bug! Use it very carefully!
  353. # Print information about current iteration.
  354. if self.__print_to_stdout == 2:
  355. print('done.')
  356. # Update the node maps.
  357. node_maps_modified = self.__update_node_maps() # @todo
  358. # Update the order of the median if no improvement can be found with the current order.
  359. # Update the sum of distances.
  360. old_sum_of_distances = self.__sum_of_distances
  361. self.__sum_of_distances = 0
  362. for graph_id, node_map in self.__node_maps_from_median.items():
  363. self.__sum_of_distances += node_map.induced_cost()
  364. # print(self.__sum_of_distances)
  365. # Print information about current iteration.
  366. if self.__print_to_stdout == 2:
  367. print('Old local SOD: ', old_sum_of_distances)
  368. print('New local SOD: ', self.__sum_of_distances)
  369. print('Best converged SOD: ', best_sum_of_distances)
  370. print('Modified median: ', median_modified)
  371. print('Modified node maps: ', node_maps_modified)
  372. print('Decreased order: ', decreased_order)
  373. print('Increased order: ', increased_order)
  374. print('===========================================================\n')
  375. converged = not (median_modified or node_maps_modified or decreased_order or increased_order)
  376. self.__itrs[median_pos] += 1
  377. # Update the best median.
  378. if self.__sum_of_distances < best_sum_of_distances:
  379. best_sum_of_distances = self.__sum_of_distances
  380. node_maps_from_best_median = self.__node_maps_from_median.copy() # @todo: this is a shallow copy, not sure if it is enough.
  381. best_median = median
  382. # Update the number of converged descents.
  383. if converged:
  384. self.__num_converged_descents += 1
  385. # Store the best encountered median.
  386. self.__sum_of_distances = best_sum_of_distances
  387. self.__node_maps_from_median = node_maps_from_best_median
  388. self.__ged_env.load_nx_graph(best_median, gen_median_id)
  389. self.__ged_env.init(self.__ged_env.get_init_type())
  390. end_descent = time.time()
  391. self.__runtime_converged = end_descent - start
  392. # Refine the sum of distances and the node maps for the converged median.
  393. self.__converged_sum_of_distances = self.__sum_of_distances
  394. if self.__refine:
  395. self.__improve_sum_of_distances(timer)
  396. # Record end time, set runtime and reset the number of initial medians.
  397. end = time.time()
  398. self.__runtime = end - start
  399. self.__num_random_inits = self.__desired_num_random_inits
  400. # Print global information.
  401. if self.__print_to_stdout != 0:
  402. print('\n===========================================================')
  403. print('Finished computation of generalized median graph.')
  404. print('-----------------------------------------------------------')
  405. print('Best SOD after initialization: ', self.__best_init_sum_of_distances)
  406. print('Converged SOD: ', self.__converged_sum_of_distances)
  407. if self.__refine:
  408. print('Refined SOD: ', self.__sum_of_distances)
  409. print('Overall runtime: ', self.__runtime)
  410. print('Runtime of initialization: ', self.__runtime_initialized)
  411. print('Runtime of block gradient descent: ', self.__runtime_converged - self.__runtime_initialized)
  412. if self.__refine:
  413. print('Runtime of refinement: ', self.__runtime - self.__runtime_converged)
  414. print('Number of initial medians: ', len(medians))
  415. total_itr = 0
  416. num_started_descents = 0
  417. for itr in self.__itrs:
  418. total_itr += itr
  419. if itr > 0:
  420. num_started_descents += 1
  421. print('Size of graph collection: ', len(graph_ids))
  422. print('Number of started descents: ', num_started_descents)
  423. print('Number of converged descents: ', self.__num_converged_descents)
  424. print('Overall number of iterations: ', total_itr)
  425. print('Overall number of times the order decreased: ', self.__num_decrease_order)
  426. print('Overall number of times the order increased: ', self.__num_increase_order)
  427. print('===========================================================\n')
  428. def __improve_sum_of_distances(self, timer): # @todo: go through and test
  429. # Use method selected for refinement phase.
  430. self.__ged_env.set_method(self.__refine_method, self.__refine_options)
  431. # Print information about current iteration.
  432. if self.__print_to_stdout == 2:
  433. progress = tqdm(desc='Improving node maps', total=len(self.__node_maps_from_median), file=sys.stdout)
  434. print('\n===========================================================')
  435. print('Improving node maps and SOD for converged median.')
  436. print('-----------------------------------------------------------')
  437. progress.update(1)
  438. # Improving the node maps.
  439. for graph_id, node_map in self.__node_maps_from_median.items():
440. if timer.expired():
  441. if self.__state == AlgorithmState.TERMINATED:
  442. self.__state = AlgorithmState.CONVERGED
  443. break
444. self.__ged_env.run_method(self.__median_id, graph_id)
445. if self.__ged_env.get_upper_bound(self.__median_id, graph_id) < node_map.induced_cost():
446. self.__node_maps_from_median[graph_id] = self.__ged_env.get_node_map(self.__median_id, graph_id)
  447. self.__sum_of_distances += self.__node_maps_from_median[graph_id].induced_cost()
  448. # Print information.
  449. if self.__print_to_stdout == 2:
  450. progress.update(1)
  451. self.__sum_of_distances = 0.0
  452. for key, val in self.__node_maps_from_median.items():
  453. self.__sum_of_distances += val.induced_cost()
  454. # Print information.
  455. if self.__print_to_stdout == 2:
  456. print('===========================================================\n')
  457. def __median_available(self):
458. return self.__median_id != np.inf
  459. def get_state(self):
  460. if not self.__median_available():
  461. raise Exception('No median has been computed. Call run() before calling get_state().')
  462. return self.__state
  463. def get_sum_of_distances(self, state=''):
  464. """Returns the sum of distances.
  465. Parameters
  466. ----------
  467. state : string
  468. The state of the estimator. Can be 'initialized' or 'converged'. Default: ""
  469. Returns
  470. -------
  471. float
  472. The sum of distances (SOD) of the median when the estimator was in the state `state` during the last call to run(). If `state` is not given, the converged SOD (without refinement) or refined SOD (with refinement) is returned.
  473. """
  474. if not self.__median_available():
  475. raise Exception('No median has been computed. Call run() before calling get_sum_of_distances().')
  476. if state == 'initialized':
  477. return self.__best_init_sum_of_distances
  478. if state == 'converged':
  479. return self.__converged_sum_of_distances
  480. return self.__sum_of_distances
  481. def __set_default_options(self):
  482. self.__init_type = 'RANDOM'
  483. self.__num_random_inits = 10
  484. self.__desired_num_random_inits = 10
  485. self.__use_real_randomness = True
  486. self.__seed = 0
  487. self.__update_order = True
  488. self.__refine = True
  489. self.__time_limit_in_sec = 0
  490. self.__epsilon = 0.0001
  491. self.__max_itrs = 100
  492. self.__max_itrs_without_update = 3
  493. self.__num_inits_increase_order = 10
  494. self.__init_type_increase_order = 'K-MEANS++'
  495. self.__max_itrs_increase_order = 10
  496. self.__print_to_stdout = 2
  497. self.__label_names = {}
  498. def __construct_initial_medians(self, graph_ids, timer, initial_medians):
  499. # Print information about current iteration.
  500. if self.__print_to_stdout == 2:
  501. print('\n===========================================================')
  502. print('Constructing initial median(s).')
  503. print('-----------------------------------------------------------')
  504. # Compute or sample the initial median(s).
  505. initial_medians.clear()
  506. if self.__init_type == 'MEDOID':
  507. self.__compute_medoid(graph_ids, timer, initial_medians)
  508. elif self.__init_type == 'MAX':
  509. pass # @todo
  510. # compute_max_order_graph_(graph_ids, initial_medians)
  511. elif self.__init_type == 'MIN':
  512. pass # @todo
  513. # compute_min_order_graph_(graph_ids, initial_medians)
  514. elif self.__init_type == 'MEAN':
  515. pass # @todo
  516. # compute_mean_order_graph_(graph_ids, initial_medians)
  517. else:
  518. pass # @todo
  519. # sample_initial_medians_(graph_ids, initial_medians)
  520. # Print information about current iteration.
  521. if self.__print_to_stdout == 2:
  522. print('===========================================================')
  523. def __compute_medoid(self, graph_ids, timer, initial_medians):
  524. # Use method selected for initialization phase.
  525. self.__ged_env.set_method(self.__init_method, self.__init_options)
  526. # Print information about current iteration.
  527. if self.__print_to_stdout == 2:
  528. progress = tqdm(desc='Computing medoid', total=len(graph_ids), file=sys.stdout)
  529. # Compute the medoid.
  530. medoid_id = graph_ids[0]
  531. best_sum_of_distances = np.inf
  532. for g_id in graph_ids:
  533. if timer.expired():
  534. self.__state = AlgorithmState.CALLED
  535. break
  536. sum_of_distances = 0
  537. for h_id in graph_ids:
  538. self.__ged_env.run_method(g_id, h_id)
  539. sum_of_distances += self.__ged_env.get_upper_bound(g_id, h_id)
  540. if sum_of_distances < best_sum_of_distances:
  541. best_sum_of_distances = sum_of_distances
  542. medoid_id = g_id
  543. # Print information about current iteration.
  544. if self.__print_to_stdout == 2:
  545. progress.update(1)
  546. initial_medians.append(self.__ged_env.get_nx_graph(medoid_id, True, True, False)) # @todo
  547. # Print information about current iteration.
  548. if self.__print_to_stdout == 2:
  549. print('\n')
  550. def __termination_criterion_met(self, converged, timer, itr, itrs_without_update):
  551. if timer.expired() or (itr >= self.__max_itrs if self.__max_itrs >= 0 else False):
  552. if self.__state == AlgorithmState.TERMINATED:
  553. self.__state = AlgorithmState.INITIALIZED
  554. return True
  555. return converged or (itrs_without_update > self.__max_itrs_without_update if self.__max_itrs_without_update >= 0 else False)
  556. def __update_median(self, graphs, median):
  557. # Print information about current iteration.
  558. if self.__print_to_stdout == 2:
  559. print('Updating median: ', end='')
  560. # Store copy of the old median.
  561. old_median = median.copy() # @todo: this is just a shallow copy.
  562. # Update the node labels.
  563. if self.__labeled_nodes:
  564. self.__update_node_labels(graphs, median)
  565. # Update the edges and their labels.
  566. self.__update_edges(graphs, median)
  567. # Print information about current iteration.
  568. if self.__print_to_stdout == 2:
  569. print('done.')
  570. return not self.__are_graphs_equal(median, old_median)
  571. def __update_node_labels(self, graphs, median):
  572. # Print information about current iteration.
  573. if self.__print_to_stdout == 2:
  574. print('nodes ... ', end='')
  575. # Iterate through all nodes of the median.
  576. for i in range(0, nx.number_of_nodes(median)):
  577. # print('i: ', i)
  578. # Collect the labels of the substituted nodes.
  579. node_labels = []
  580. for graph_id, graph in graphs.items():
  581. # print('graph_id: ', graph_id)
  582. # print(self.__node_maps_from_median[graph_id])
  583. k = self.__node_maps_from_median[graph_id].image(i)
  584. # print('k: ', k)
  585. if k != np.inf:
  586. node_labels.append(graph.nodes[k])
  587. # Compute the median label and update the median.
  588. if len(node_labels) > 0:
  589. # median_label = self.__ged_env.get_median_node_label(node_labels)
  590. median_label = self.__get_median_node_label(node_labels)
  591. if self.__ged_env.get_node_rel_cost(median.nodes[i], median_label) > self.__epsilon:
  592. nx.set_node_attributes(median, {i: median_label})
  593. def __update_edges(self, graphs, median):
  594. # Print information about current iteration.
  595. if self.__print_to_stdout == 2:
  596. print('edges ... ', end='')
  597. # # Clear the adjacency lists of the median and reset number of edges to 0.
  598. # median_edges = list(median.edges)
  599. # for (head, tail) in median_edges:
  600. # median.remove_edge(head, tail)
  601. # @todo: what if edge is not labeled?
  602. # Iterate through all possible edges (i,j) of the median.
  603. for i in range(0, nx.number_of_nodes(median)):
  604. for j in range(i + 1, nx.number_of_nodes(median)):
  605. # Collect the labels of the edges to which (i,j) is mapped by the node maps.
  606. edge_labels = []
  607. for graph_id, graph in graphs.items():
  608. k = self.__node_maps_from_median[graph_id].image(i)
  609. l = self.__node_maps_from_median[graph_id].image(j)
  610. if k != np.inf and l != np.inf:
  611. if graph.has_edge(k, l):
  612. edge_labels.append(graph.edges[(k, l)])
  613. # Compute the median edge label and the overall edge relabeling cost.
  614. rel_cost = 0
  615. median_label = self.__ged_env.get_edge_label(1)
  616. if median.has_edge(i, j):
  617. median_label = median.edges[(i, j)]
  618. if self.__labeled_edges and len(edge_labels) > 0:
  619. new_median_label = self.__get_median_edge_label(edge_labels)
  620. if self.__ged_env.get_edge_rel_cost(median_label, new_median_label) > self.__epsilon:
  621. median_label = new_median_label
  622. for edge_label in edge_labels:
  623. rel_cost += self.__ged_env.get_edge_rel_cost(median_label, edge_label)
  624. # Update the median.
  625. if median.has_edge(i, j):
  626. median.remove_edge(i, j)
  627. if rel_cost < (self.__edge_ins_cost + self.__edge_del_cost) * len(edge_labels) - self.__edge_del_cost * len(graphs):
  628. median.add_edge(i, j, **median_label)
  629. # else:
  630. # if median.has_edge(i, j):
  631. # median.remove_edge(i, j)
  632. def __update_node_maps(self):
  633. # Print information about current iteration.
  634. if self.__print_to_stdout == 2:
  635. progress = tqdm(desc='Updating node maps', total=len(self.__node_maps_from_median), file=sys.stdout)
  636. # Update the node maps.
  637. node_maps_were_modified = False
  638. for graph_id, node_map in self.__node_maps_from_median.items():
  639. self.__ged_env.run_method(self.__median_id, graph_id)
  640. if self.__ged_env.get_upper_bound(self.__median_id, graph_id) < node_map.induced_cost() - self.__epsilon:
  641. # xxx = self.__node_maps_from_median[graph_id]
  642. self.__node_maps_from_median[graph_id] = self.__ged_env.get_node_map(self.__median_id, graph_id)
  643. # yyy = self.__node_maps_from_median[graph_id]
  644. node_maps_were_modified = True
  645. # Print information about current iteration.
  646. if self.__print_to_stdout == 2:
  647. progress.update(1)
  648. # Print information about current iteration.
  649. if self.__print_to_stdout == 2:
  650. print('\n')
  651. # Return true if the node maps were modified.
  652. return node_maps_were_modified
  653. def __decrease_order(self, graphs, median):
  654. # Print information about current iteration
  655. if self.__print_to_stdout == 2:
  656. print('Trying to decrease order: ... ', end='')
  657. # Initialize ID of the node that is to be deleted.
  658. id_deleted_node = [None] # @todo: or np.inf
  659. decreased_order = False
  660. # Decrease the order as long as the best deletion delta is negative.
  661. while self.__compute_best_deletion_delta(graphs, median, id_deleted_node) < -self.__epsilon: # @todo
  662. decreased_order = True
  663. median = self.__delete_node_from_median(id_deleted_node[0], median)
  664. # Print information about current iteration.
  665. if self.__print_to_stdout == 2:
  666. print('done.')
  667. # Return true iff the order was decreased.
  668. return decreased_order
  669. def __compute_best_deletion_delta(self, graphs, median, id_deleted_node):
  670. best_delta = 0.0
  671. # Determine node that should be deleted (if any).
  672. for i in range(0, nx.number_of_nodes(median)):
  673. # Compute cost delta.
  674. delta = 0.0
  675. for graph_id, graph in graphs.items():
  676. k = self.__node_maps_from_median[graph_id].image(i)
  677. if k == np.inf:
  678. delta -= self.__node_del_cost
  679. else:
  680. delta += self.__node_ins_cost - self.__ged_env.get_node_rel_cost(median.nodes[i], graph.nodes[k])
  681. for j, j_label in median[i].items():
  682. l = self.__node_maps_from_median[graph_id].image(j)
  683. if k == np.inf or l == np.inf:
  684. delta -= self.__edge_del_cost
  685. elif not graph.has_edge(k, l):
  686. delta -= self.__edge_del_cost
  687. else:
  688. delta += self.__edge_ins_cost - self.__ged_env.get_edge_rel_cost(j_label, graph.edges[(k, l)])
  689. # Update best deletion delta.
  690. if delta < best_delta - self.__epsilon:
  691. best_delta = delta
  692. id_deleted_node[0] = i
  693. # id_deleted_node[0] = 3 # @todo:
  694. return best_delta
  695. def __delete_node_from_median(self, id_deleted_node, median):
  696. # Update the median.
  697. median.remove_node(id_deleted_node)
  698. median = nx.convert_node_labels_to_integers(median, first_label=0, ordering='default', label_attribute=None) # @todo: This doesn't guarantee that the order is the same as in G.
  699. # Update the node maps.
  700. for key, node_map in self.__node_maps_from_median.items():
  701. new_node_map = NodeMap(nx.number_of_nodes(median), node_map.num_target_nodes())
  702. is_unassigned_target_node = [True] * node_map.num_target_nodes()
  703. for i in range(0, nx.number_of_nodes(median) + 1):
  704. if i != id_deleted_node:
  705. new_i = (i if i < id_deleted_node else i - 1)
  706. k = node_map.image(i)
  707. new_node_map.add_assignment(new_i, k)
  708. if k != np.inf:
  709. is_unassigned_target_node[k] = False
  710. for k in range(0, node_map.num_target_nodes()):
  711. if is_unassigned_target_node[k]:
  712. new_node_map.add_assignment(np.inf, k)
  713. # print(new_node_map.get_forward_map(), new_node_map.get_backward_map())
  714. self.__node_maps_from_median[key] = new_node_map
  715. # Increase overall number of decreases.
  716. self.__num_decrease_order += 1
  717. return median
  718. def __increase_order(self, graphs, median):
  719. # Print information about current iteration.
  720. if self.__print_to_stdout == 2:
  721. print('Trying to increase order: ... ', end='')
  722. # Initialize the best configuration and the best label of the node that is to be inserted.
  723. best_config = {}
  724. best_label = self.__ged_env.get_node_label(1)
  725. increased_order = False
  726. # Increase the order as long as the best insertion delta is negative.
727. while self.__compute_best_insertion_delta(graphs, best_config, best_label) < -self.__epsilon:
  728. increased_order = True
  729. self.__add_node_to_median(best_config, best_label, median)
  730. # Print information about current iteration.
  731. if self.__print_to_stdout == 2:
  732. print('done.')
  733. # Return true iff the order was increased.
  734. return increased_order
  735. def __compute_best_insertion_delta(self, graphs, best_config, best_label):
  736. # Construct sets of inserted nodes.
  737. no_inserted_node = True
  738. inserted_nodes = {}
  739. for graph_id, graph in graphs.items():
  740. inserted_nodes[graph_id] = []
  741. best_config[graph_id] = np.inf
  742. for k in range(nx.number_of_nodes(graph)):
  743. if self.__node_maps_from_median[graph_id].pre_image(k) == np.inf:
  744. no_inserted_node = False
745. inserted_nodes[graph_id].append((k, tuple(item for item in graph.nodes[k].items()))) # @todo: can order of label names be guaranteed?
  746. # Return 0.0 if no node is inserted in any of the graphs.
  747. if no_inserted_node:
  748. return 0.0
  749. # Compute insertion configuration, label, and delta.
  750. best_delta = 0.0 # @todo
  751. if len(self.__label_names['node_labels']) == 0 and len(self.__label_names['node_attrs']) == 0: # @todo
  752. best_delta = self.__compute_insertion_delta_unlabeled(inserted_nodes, best_config, best_label)
  753. elif self.__constant_node_costs:
  754. best_delta = self.__compute_insertion_delta_constant(inserted_nodes, best_config, best_label)
  755. else:
  756. best_delta = self.__compute_insertion_delta_generic(inserted_nodes, best_config, best_label)
  757. # Return the best delta.
  758. return best_delta
  759. def __compute_insertion_delta_unlabeled(self, inserted_nodes, best_config, best_label):
760. # Construct the best configuration and compute its insertion delta.
  761. best_delta = 0.0
  762. best_config.clear()
  763. for graph_id, node_set in inserted_nodes.items():
  764. if len(node_set) == 0:
  765. best_config[graph_id] = np.inf
  766. best_delta += self.__node_del_cost
  767. else:
  768. best_config[graph_id] = node_set[0][0]
  769. best_delta -= self.__node_ins_cost
  770. # Return the best insertion delta.
  771. return best_delta
  772. def __compute_insertion_delta_constant(self, inserted_nodes, best_config, best_label):
  773. # Construct histogram and inverse label maps.
  774. hist = {}
  775. inverse_label_maps = {}
  776. for graph_id, node_set in inserted_nodes.items():
  777. inverse_label_maps[graph_id] = {}
  778. for node in node_set:
  779. k = node[0]
  780. label = node[1]
  781. if label not in inverse_label_maps[graph_id]:
  782. inverse_label_maps[graph_id][label] = k
  783. if label not in hist:
  784. hist[label] = 1
  785. else:
  786. hist[label] += 1
  787. # Determine the best label.
  788. best_count = 0
  789. for key, val in hist.items():
  790. if val > best_count:
  791. best_count = val
  792. best_label_tuple = key
  793. # get best label.
  794. best_label.clear()
  795. for key, val in best_label_tuple:
  796. best_label[key] = val
  797. # Construct the best configuration and compute its insertion delta.
  798. best_config.clear()
  799. best_delta = 0.0
  800. node_rel_cost = self.__ged_env.get_node_rel_cost(self.__ged_env.get_node_label(1), self.__ged_env.get_node_label(2))
  801. triangle_ineq_holds = (node_rel_cost <= self.__node_del_cost + self.__node_ins_cost)
  802. for graph_id, _ in inserted_nodes.items():
  803. if best_label_tuple in inverse_label_maps[graph_id]:
  804. best_config[graph_id] = inverse_label_maps[graph_id][best_label_tuple]
  805. best_delta -= self.__node_ins_cost
  806. elif triangle_ineq_holds and not len(inserted_nodes[graph_id]) == 0:
  807. best_config[graph_id] = inserted_nodes[graph_id][0][0]
  808. best_delta += node_rel_cost - self.__node_ins_cost
  809. else:
  810. best_config[graph_id] = np.inf
  811. best_delta += self.__node_del_cost
  812. # Return the best insertion delta.
  813. return best_delta
  814. def __compute_insertion_delta_generic(self, inserted_nodes, best_config, best_label):
  815. # Collect all node labels of inserted nodes.
  816. node_labels = []
  817. for _, node_set in inserted_nodes.items():
  818. for node in node_set:
  819. node_labels.append(node[1])
  820. # Compute node label medians that serve as initial solutions for block gradient descent.
  821. initial_node_labels = []
  822. self.__compute_initial_node_labels(node_labels, initial_node_labels)
  823. # Determine best insertion configuration, label, and delta via parallel block gradient descent from all initial node labels.
  824. best_delta = 0.0
  825. for node_label in initial_node_labels:
  826. # Construct local configuration.
  827. config = {}
  828. for graph_id, _ in inserted_nodes.items():
  829. config[graph_id] = tuple((np.inf, self.__ged_env.get_node_label(1)))
  830. # Run block gradient descent.
  831. converged = False
  832. itr = 0
  833. while not self.__insertion_termination_criterion_met(converged, itr):
  834. converged = not self.__update_config_(node_label, inserted_nodes, config, node_labels)
  835. converged = converged and (not self.__update_node_label(node_labels, node_label))
  836. itr += 1
  837. # Compute insertion delta of converged solution.
  838. delta = 0.0
  839. for _, node in config.items():
  840. if node[0] == np.inf:
  841. delta += self.__node_del_cost
  842. else:
  843. delta += self.__ged_env.node_rel_cost(node_label, node[1]) - self.__node_ins_cost
  844. # Update best delta and global configuration if improvement has been found.
  845. if delta < best_delta - self.__epsilon:
  846. best_delta = delta
  847. best_label = node_label # @todo: may be wrong.
  848. best_config.clear()
  849. for graph_id, k in config.items():
  850. best_config[graph_id] = k
  851. # Return the best delta.
  852. return best_delta
  853. def __compute_initial_node_labels(self, node_labels, median_labels):
  854. median_labels.clear()
  855. if self.__use_real_randomness: # @todo: may not work if parallelized.
856. rng = np.random.randint(0, high=2**32 - 1, size=1)
  857. urng = np.random.RandomState(seed=rng[0])
  858. else:
  859. urng = np.random.RandomState(seed=self.__seed)
  860. # Generate the initial node label medians.
  861. if self.__init_type_increase_order == 'K-MEANS++':
  862. # Use k-means++ heuristic to generate the initial node label medians.
  863. already_selected = [False] * len(node_labels)
864. selected_label_id = int(urng.uniform(low=0, high=len(node_labels), size=1)[0])
  865. median_labels.append(node_labels[selected_label_id])
  866. already_selected[selected_label_id] = True
867. while len(median_labels) < self.__num_inits_increase_order:
  868. weights = [np.inf] * len(node_labels)
  869. for label_id in range(0, len(node_labels)):
  870. if already_selected[label_id]:
  871. weights[label_id] = 0
  872. continue
  873. for label in median_labels:
  874. weights[label_id] = min(weights[label_id], self.__ged_env.node_rel_cost(label, node_labels[label_id]))
875. selected_label_id = int(urng.choice(range(0, len(weights)), size=1, p=np.array(weights) / np.sum(weights))[0])
  876. median_labels.append(node_labels[selected_label_id])
  877. already_selected[selected_label_id] = True
  878. else:
  879. # Compute the initial node medians as the medians of randomly generated clusters of (roughly) equal size.
  880. # @todo: go through and test.
881. # Shuffle a copy of the node labels using the random number generator urng.
882. shuffled_node_labels = [node_labels[idx] for idx in urng.permutation(len(node_labels))]
  883. cluster_size = len(node_labels) / self.__num_inits_increase_order
884. pos = 0
  885. cluster = []
  886. while len(median_labels) < self.__num_inits_increase_order - 1:
  887. while pos < (len(median_labels) + 1) * cluster_size:
  888. cluster.append(shuffled_node_labels[pos])
  889. pos += 1
  890. median_labels.append(self.__get_median_node_label(cluster))
  891. cluster.clear()
892. while pos < len(shuffled_node_labels):
893. cluster.append(shuffled_node_labels[pos])
894. pos += 1
  895. median_labels.append(self.__get_median_node_label(cluster))
  896. cluster.clear()
  897. # Run Lloyd's Algorithm.
  898. converged = False
  899. closest_median_ids = [np.inf] * len(node_labels)
900. clusters = [[] for _ in range(len(median_labels))]
  901. itr = 1
  902. while not self.__insertion_termination_criterion_met(converged, itr):
  903. converged = not self.__update_clusters(node_labels, median_labels, closest_median_ids)
  904. if not converged:
  905. for cluster in clusters:
  906. cluster.clear()
  907. for label_id in range(0, len(node_labels)):
908. clusters[closest_median_ids[label_id]].append(node_labels[label_id])
909. for cluster_id in range(0, len(clusters)):
910. self.__update_node_label(clusters[cluster_id], median_labels[cluster_id])
  911. itr += 1
  912. def __insertion_termination_criterion_met(self, converged, itr):
  913. return converged or (itr >= self.__max_itrs_increase_order if self.__max_itrs_increase_order > 0 else False)
  914. def __update_config_(self, node_label, inserted_nodes, config, node_labels):
  915. # Determine the best configuration.
  916. config_modified = False
  917. for graph_id, node_set in inserted_nodes.items():
  918. best_assignment = config[graph_id]
  919. best_cost = 0.0
  920. if best_assignment[0] == np.inf:
  921. best_cost = self.__node_del_cost
  922. else:
923. best_cost = self.__ged_env.node_rel_cost(node_label, best_assignment[1]) - self.__node_ins_cost
  924. for node in node_set:
  925. cost = self.__ged_env.node_rel_cost(node_label, node[1]) - self.__node_ins_cost
  926. if cost < best_cost - self.__epsilon:
  927. best_cost = cost
  928. best_assignment = node
  929. config_modified = True
  930. if self.__node_del_cost < best_cost - self.__epsilon:
  931. best_cost = self.__node_del_cost
932. best_assignment = (np.inf, best_assignment[1])
  933. config_modified = True
  934. config[graph_id] = best_assignment
  935. # Collect the node labels contained in the best configuration.
  936. node_labels.clear()
  937. for key, val in config.items():
  938. if val[0] != np.inf:
  939. node_labels.append(val[1])
  940. # Return true if the configuration was modified.
  941. return config_modified
  942. def __update_node_label(self, node_labels, node_label):
  943. new_node_label = self.__get_median_node_label(node_labels)
  944. if self.__ged_env.node_rel_cost(new_node_label, node_label) > self.__epsilon:
  945. node_label = new_node_label # @todo: may be wrong
  946. return True
  947. return False
  948. def __update_clusters(self, node_labels, median_labels, closest_median_ids):
  949. # Determine the closest median for each node label.
  950. clusters_modified = False
  951. for label_id in range(0, len(node_labels)):
  952. closest_median_id = np.inf
  953. dist_to_closest_median = np.inf
  954. for median_id in range(0, len(median_labels)):
  955. dist_to_median = self.__ged_env.node_rel_cost(median_labels[median_id], node_labels[label_id])
  956. if dist_to_median < dist_to_closest_median - self.__epsilon:
  957. dist_to_closest_median = dist_to_median
  958. closest_median_id = median_id
  959. if closest_median_id != closest_median_ids[label_id]:
  960. closest_median_ids[label_id] = closest_median_id
  961. clusters_modified = True
  962. # Return true if the clusters were modified.
  963. return clusters_modified
  964. def __add_node_to_median(self, best_config, best_label, median):
  965. # Update the median.
  966. median.add_node(nx.number_of_nodes(median), **best_label)
  967. # Update the node maps.
  968. for graph_id, node_map in self.__node_maps_from_median.items():
  969. node_map_as_rel = []
  970. node_map.as_relation(node_map_as_rel)
  971. new_node_map = NodeMap(nx.number_of_nodes(median), node_map.num_target_nodes())
  972. for assignment in node_map_as_rel:
  973. new_node_map.add_assignment(assignment[0], assignment[1])
  974. new_node_map.add_assignment(nx.number_of_nodes(median) - 1, best_config[graph_id])
  975. self.__node_maps_from_median[graph_id] = new_node_map
  976. # Increase overall number of increases.
  977. self.__num_increase_order += 1
  978. def __improve_sum_of_distances(self, timer):
  979. pass
  980. def __median_available(self):
  981. return self.__median_id != np.inf
  982. # def __get_node_image_from_map(self, node_map, node):
  983. # """
  984. # Return ID of the node mapping of `node` in `node_map`.
  985. # Parameters
  986. # ----------
  987. # node_map : list[tuple(int, int)]
  988. # List of node maps where the mapping node is found.
  989. #
  990. # node : int
  991. # The mapping node of this node is returned
  992. # Raises
  993. # ------
  994. # Exception
  995. # If the node with ID `node` is not contained in the source nodes of the node map.
  996. # Returns
  997. # -------
  998. # int
  999. # ID of the mapping of `node`.
  1000. #
  1001. # Notes
  1002. # -----
  1003. # This function is not implemented in the `ged::MedianGraphEstimator` class of the `GEDLIB` library. Instead it is a Python implementation of the `ged::NodeMap::image` function.
  1004. # """
  1005. # if node < len(node_map):
  1006. # return node_map[node][1] if node_map[node][1] < len(node_map) else np.inf
  1007. # else:
  1008. # raise Exception('The node with ID ', str(node), ' is not contained in the source nodes of the node map.')
  1009. # return np.inf
  1010. def __are_graphs_equal(self, g1, g2):
  1011. """
  1012. Check if the two graphs are equal.
  1013. Parameters
  1014. ----------
  1015. g1 : NetworkX graph object
  1016. Graph 1 to be compared.
  1017. g2 : NetworkX graph object
  1018. Graph 2 to be compared.
  1019. Returns
  1020. -------
  1021. bool
1022. True if the two graphs are equal.
  1023. Notes
  1024. -----
  1025. This is not an identical check. Here the two graphs are equal if and only if their original_node_ids, nodes, all node labels, edges and all edge labels are equal. This function is specifically designed for class `MedianGraphEstimator` and should not be used elsewhere.
  1026. """
  1027. # check original node ids.
  1028. if not g1.graph['original_node_ids'] == g2.graph['original_node_ids']:
  1029. return False
  1030. # check nodes.
  1031. nlist1 = [n for n in g1.nodes(data=True)]
  1032. nlist2 = [n for n in g2.nodes(data=True)]
  1033. if not nlist1 == nlist2:
  1034. return False
  1035. # check edges.
  1036. elist1 = [n for n in g1.edges(data=True)]
  1037. elist2 = [n for n in g2.edges(data=True)]
  1038. if not elist1 == elist2:
  1039. return False
  1040. return True
  1041. def compute_my_cost(g, h, node_map):
  1042. cost = 0.0
  1043. for node in g.nodes:
  1044. cost += 0
  1045. def set_label_names(self, node_labels=[], edge_labels=[], node_attrs=[], edge_attrs=[]):
  1046. self.__label_names = {'node_labels': node_labels, 'edge_labels': edge_labels,
  1047. 'node_attrs': node_attrs, 'edge_attrs': edge_attrs}
  1048. def __get_median_node_label(self, node_labels):
  1049. if len(self.__label_names['node_labels']) > 0:
  1050. return self.__get_median_label_symbolic(node_labels)
  1051. elif len(self.__label_names['node_attrs']) > 0:
  1052. return self.__get_median_label_nonsymbolic(node_labels)
  1053. else:
  1054. raise Exception('Node label names are not given.')
  1055. def __get_median_edge_label(self, edge_labels):
  1056. if len(self.__label_names['edge_labels']) > 0:
  1057. return self.__get_median_label_symbolic(edge_labels)
  1058. elif len(self.__label_names['edge_attrs']) > 0:
  1059. return self.__get_median_label_nonsymbolic(edge_labels)
  1060. else:
  1061. raise Exception('Edge label names are not given.')
  1062. def __get_median_label_symbolic(self, labels):
  1063. # Construct histogram.
  1064. hist = {}
  1065. for label in labels:
  1066. label = tuple([kv for kv in label.items()]) # @todo: this may be slow.
  1067. if label not in hist:
  1068. hist[label] = 1
  1069. else:
  1070. hist[label] += 1
  1071. # Return the label that appears most frequently.
  1072. best_count = 0
  1073. median_label = {}
  1074. for label, count in hist.items():
  1075. if count > best_count:
  1076. best_count = count
  1077. median_label = {kv[0]: kv[1] for kv in label}
  1078. return median_label
  1079. def __get_median_label_nonsymbolic(self, labels):
  1080. if len(labels) == 0:
  1081. return {} # @todo
  1082. else:
  1083. # Transform the labels into coordinates and compute mean label as initial solution.
  1084. labels_as_coords = []
  1085. sums = {}
  1086. for key, val in labels[0].items():
  1087. sums[key] = 0
  1088. for label in labels:
  1089. coords = {}
  1090. for key, val in label.items():
  1091. label_f = float(val)
  1092. sums[key] += label_f
  1093. coords[key] = label_f
  1094. labels_as_coords.append(coords)
  1095. median = {}
  1096. for key, val in sums.items():
  1097. median[key] = val / len(labels)
  1098. # Run main loop of Weiszfeld's Algorithm.
  1099. epsilon = 0.0001
  1100. delta = 1.0
  1101. num_itrs = 0
  1102. all_equal = False
  1103. while ((delta > epsilon) and (num_itrs < 100) and (not all_equal)):
  1104. numerator = {}
  1105. for key, val in sums.items():
  1106. numerator[key] = 0
  1107. denominator = 0
  1108. for label_as_coord in labels_as_coords:
  1109. norm = 0
  1110. for key, val in label_as_coord.items():
  1111. norm += (val - median[key]) ** 2
  1112. norm = np.sqrt(norm)
  1113. if norm > 0:
  1114. for key, val in label_as_coord.items():
  1115. numerator[key] += val / norm
  1116. denominator += 1.0 / norm
  1117. if denominator == 0:
  1118. all_equal = True
  1119. else:
  1120. new_median = {}
  1121. delta = 0.0
  1122. for key, val in numerator.items():
  1123. this_median = val / denominator
  1124. new_median[key] = this_median
  1125. delta += np.abs(median[key] - this_median)
  1126. median = new_median
  1127. num_itrs += 1
  1128. # Transform the solution to strings and return it.
  1129. median_label = {}
  1130. for key, val in median.items():
  1131. median_label[key] = str(val)
  1132. return median_label
  1133. # def __get_median_edge_label_symbolic(self, edge_labels):
  1134. # pass
  1135. # def __get_median_edge_label_nonsymbolic(self, edge_labels):
  1136. # if len(edge_labels) == 0:
  1137. # return {}
  1138. # else:
  1139. # # Transform the labels into coordinates and compute mean label as initial solution.
  1140. # edge_labels_as_coords = []
  1141. # sums = {}
  1142. # for key, val in edge_labels[0].items():
  1143. # sums[key] = 0
  1144. # for edge_label in edge_labels:
  1145. # coords = {}
  1146. # for key, val in edge_label.items():
  1147. # label = float(val)
  1148. # sums[key] += label
  1149. # coords[key] = label
  1150. # edge_labels_as_coords.append(coords)
  1151. # median = {}
  1152. # for key, val in sums.items():
  1153. # median[key] = val / len(edge_labels)
  1154. #
  1155. # # Run main loop of Weiszfeld's Algorithm.
  1156. # epsilon = 0.0001
  1157. # delta = 1.0
  1158. # num_itrs = 0
  1159. # all_equal = False
  1160. # while ((delta > epsilon) and (num_itrs < 100) and (not all_equal)):
  1161. # numerator = {}
  1162. # for key, val in sums.items():
  1163. # numerator[key] = 0
  1164. # denominator = 0
  1165. # for edge_label_as_coord in edge_labels_as_coords:
  1166. # norm = 0
  1167. # for key, val in edge_label_as_coord.items():
  1168. # norm += (val - median[key]) ** 2
  1169. # norm += np.sqrt(norm)
  1170. # if norm > 0:
  1171. # for key, val in edge_label_as_coord.items():
  1172. # numerator[key] += val / norm
  1173. # denominator += 1.0 / norm
  1174. # if denominator == 0:
  1175. # all_equal = True
  1176. # else:
  1177. # new_median = {}
  1178. # delta = 0.0
  1179. # for key, val in numerator.items():
  1180. # this_median = val / denominator
  1181. # new_median[key] = this_median
  1182. # delta += np.abs(median[key] - this_median)
  1183. # median = new_median
  1184. #
  1185. # num_itrs += 1
  1186. #
  1187. # # Transform the solution to ged::GXLLabel and return it.
  1188. # median_label = {}
  1189. # for key, val in median.items():
  1190. # median_label[key] = str(val)
  1191. # return median_label
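# ----------------------------------------------------------------------------
# Example usage (a hedged sketch, not part of the original file). It assumes a
# gedlibpy GED environment that has already been populated with graphs, had its
# edit costs set, and been initialized, plus two dummy graphs added to receive
# the set median and the generalized median. The names ged_env, graph_ids,
# set_median_id, and gen_median_id below are placeholders for those objects.
#
# mge = MedianGraphEstimator(ged_env, constant_node_costs=True)
# mge.set_options('--init-type MEDOID --seed 0 --stdout 1')
# mge.set_init_method('BRANCH_FAST')
# mge.set_descent_method('BRANCH_FAST')
# mge.set_label_names(node_attrs=['x', 'y'])  # attribute names depend on the dataset
# mge.run(graph_ids, set_median_id, gen_median_id)
# gen_median = ged_env.get_graph(gen_median_id)  # per the run() docstring
# print('SOD of the generalized median:', mge.get_sum_of_distances())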

A Python package for graph kernels, graph edit distances and graph pre-image problem.