You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

dataset.py 26 kB

4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839
  1. #!/usr/bin/env python3
  2. # -*- coding: utf-8 -*-
  3. """
  4. Created on Thu Mar 26 18:48:27 2020
  5. @author: ljia
  6. """
  7. import numpy as np
  8. import networkx as nx
  9. import os
  10. from gklearn.dataset import DATASET_META, DataFetcher, DataLoader
  11. class Dataset(object):
  12. def __init__(self, inputs=None, root='datasets', filename_targets=None, targets=None, mode='networkx', clean_labels=True, reload=False, verbose=False, **kwargs):
  13. self._substructures = None
  14. self._node_label_dim = None
  15. self._edge_label_dim = None
  16. self._directed = None
  17. self._dataset_size = None
  18. self._total_node_num = None
  19. self._ave_node_num = None
  20. self._min_node_num = None
  21. self._max_node_num = None
  22. self._total_edge_num = None
  23. self._ave_edge_num = None
  24. self._min_edge_num = None
  25. self._max_edge_num = None
  26. self._ave_node_degree = None
  27. self._min_node_degree = None
  28. self._max_node_degree = None
  29. self._ave_fill_factor = None
  30. self._min_fill_factor = None
  31. self._max_fill_factor = None
  32. self._node_label_nums = None
  33. self._edge_label_nums = None
  34. self._node_attr_dim = None
  35. self._edge_attr_dim = None
  36. self._class_number = None
  37. self._ds_name = None
  38. if inputs is None:
  39. self._graphs = None
  40. self._targets = None
  41. self._node_labels = None
  42. self._edge_labels = None
  43. self._node_attrs = None
  44. self._edge_attrs = None
  45. # If inputs is a list of graphs.
  46. elif isinstance(inputs, list):
  47. node_labels = kwargs.get('node_labels', None)
  48. node_attrs = kwargs.get('node_attrs', None)
  49. edge_labels = kwargs.get('edge_labels', None)
  50. edge_attrs = kwargs.get('edge_attrs', None)
  51. self.load_graphs(inputs, targets=targets)
  52. self.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  53. if clean_labels:
  54. self.clean_labels()
  55. elif isinstance(inputs, str):
  56. # If inputs is predefined dataset name.
  57. if inputs in DATASET_META:
  58. self.load_predefined_dataset(inputs, root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  59. self._ds_name = inputs
  60. # If the dataset is specially defined, i.g., Alkane_unlabeled, MAO_lite.
  61. elif self.is_special_dataset(inputs):
  62. self.load_special_dataset(inputs, root, clean_labels, reload, verbose)
  63. self._ds_name = inputs
  64. # If inputs is a file name.
  65. elif os.path.isfile(inputs):
  66. self.load_dataset(inputs, filename_targets=filename_targets, clean_labels=clean_labels, **kwargs)
  67. # If inputs is a file name.
  68. else:
  69. raise ValueError('The "inputs" argument "' + inputs + '" is not a valid dataset name or file name.')
  70. else:
  71. raise TypeError('The "inputs" argument cannot be recognized. "Inputs" can be a list of graphs, a predefined dataset name, or a file name of a dataset.')
  72. def load_dataset(self, filename, filename_targets=None, clean_labels=True, **kwargs):
  73. self._graphs, self._targets, label_names = DataLoader(filename, filename_targets=filename_targets, **kwargs).data
  74. self._node_labels = label_names['node_labels']
  75. self._node_attrs = label_names['node_attrs']
  76. self._edge_labels = label_names['edge_labels']
  77. self._edge_attrs = label_names['edge_attrs']
  78. if clean_labels:
  79. self.clean_labels()
  80. def load_graphs(self, graphs, targets=None):
  81. # this has to be followed by set_labels().
  82. self._graphs = graphs
  83. self._targets = targets
  84. # self.set_labels_attrs() # @todo
  85. def load_predefined_dataset(self, ds_name, root='datasets', clean_labels=True, reload=False, verbose=False):
  86. path = DataFetcher(name=ds_name, root=root, reload=reload, verbose=verbose).path
  87. if DATASET_META[ds_name]['database'] == 'tudataset':
  88. ds_file = os.path.join(path, ds_name + '_A.txt')
  89. fn_targets = None
  90. else:
  91. load_files = DATASET_META[ds_name]['load_files']
  92. if isinstance(load_files[0], str):
  93. ds_file = os.path.join(path, load_files[0])
  94. else: # load_files[0] is a list of files.
  95. ds_file = [os.path.join(path, fn) for fn in load_files[0]]
  96. fn_targets = os.path.join(path, load_files[1]) if len(load_files) == 2 else None
  97. if 'extra_params' in DATASET_META[ds_name]:
  98. kwargs = DATASET_META[ds_name]['extra_params']
  99. else:
  100. kwargs = {}
  101. self._graphs, self._targets, label_names = DataLoader(ds_file, filename_targets=fn_targets, **kwargs).data
  102. self._node_labels = label_names['node_labels']
  103. self._node_attrs = label_names['node_attrs']
  104. self._edge_labels = label_names['edge_labels']
  105. self._edge_attrs = label_names['edge_attrs']
  106. if clean_labels:
  107. self.clean_labels()
  108. # Deal with specific datasets.
  109. if ds_name == 'Alkane':
  110. self.trim_dataset(edge_required=True)
  111. self.remove_labels(node_labels=['atom_symbol'])
  112. def set_labels(self, node_labels=[], node_attrs=[], edge_labels=[], edge_attrs=[]):
  113. self._node_labels = node_labels
  114. self._node_attrs = node_attrs
  115. self._edge_labels = edge_labels
  116. self._edge_attrs = edge_attrs
  117. def set_labels_attrs(self, node_labels=None, node_attrs=None, edge_labels=None, edge_attrs=None):
  118. # @todo: remove labels which have only one possible values.
  119. if node_labels is None:
  120. self._node_labels = self._graphs[0].graph['node_labels']
  121. # # graphs are considered node unlabeled if all nodes have the same label.
  122. # infos.update({'node_labeled': is_nl if node_label_num > 1 else False})
  123. if node_attrs is None:
  124. self._node_attrs = self._graphs[0].graph['node_attrs']
  125. # for G in Gn:
  126. # for n in G.nodes(data=True):
  127. # if 'attributes' in n[1]:
  128. # return len(n[1]['attributes'])
  129. # return 0
  130. if edge_labels is None:
  131. self._edge_labels = self._graphs[0].graph['edge_labels']
  132. # # graphs are considered edge unlabeled if all edges have the same label.
  133. # infos.update({'edge_labeled': is_el if edge_label_num > 1 else False})
  134. if edge_attrs is None:
  135. self._edge_attrs = self._graphs[0].graph['edge_attrs']
  136. # for G in Gn:
  137. # if nx.number_of_edges(G) > 0:
  138. # for e in G.edges(data=True):
  139. # if 'attributes' in e[2]:
  140. # return len(e[2]['attributes'])
  141. # return 0
  142. def get_dataset_infos(self, keys=None, params=None):
  143. """Computes and returns the structure and property information of the graph dataset.
  144. Parameters
  145. ----------
  146. keys : list, optional
  147. A list of strings which indicate which informations will be returned. The
  148. possible choices includes:
  149. 'substructures': sub-structures graphs contains, including 'linear', 'non
  150. linear' and 'cyclic'.
  151. 'node_label_dim': whether vertices have symbolic labels.
  152. 'edge_label_dim': whether egdes have symbolic labels.
  153. 'directed': whether graphs in dataset are directed.
  154. 'dataset_size': number of graphs in dataset.
  155. 'total_node_num': total number of vertices of all graphs in dataset.
  156. 'ave_node_num': average number of vertices of graphs in dataset.
  157. 'min_node_num': minimum number of vertices of graphs in dataset.
  158. 'max_node_num': maximum number of vertices of graphs in dataset.
  159. 'total_edge_num': total number of edges of all graphs in dataset.
  160. 'ave_edge_num': average number of edges of graphs in dataset.
  161. 'min_edge_num': minimum number of edges of graphs in dataset.
  162. 'max_edge_num': maximum number of edges of graphs in dataset.
  163. 'ave_node_degree': average vertex degree of graphs in dataset.
  164. 'min_node_degree': minimum vertex degree of graphs in dataset.
  165. 'max_node_degree': maximum vertex degree of graphs in dataset.
  166. 'ave_fill_factor': average fill factor (number_of_edges /
  167. (number_of_nodes ** 2)) of graphs in dataset.
  168. 'min_fill_factor': minimum fill factor of graphs in dataset.
  169. 'max_fill_factor': maximum fill factor of graphs in dataset.
  170. 'node_label_nums': list of numbers of symbolic vertex labels of graphs in dataset.
  171. 'edge_label_nums': list number of symbolic edge labels of graphs in dataset.
  172. 'node_attr_dim': number of dimensions of non-symbolic vertex labels.
  173. Extracted from the 'attributes' attribute of graph nodes.
  174. 'edge_attr_dim': number of dimensions of non-symbolic edge labels.
  175. Extracted from the 'attributes' attribute of graph edges.
  176. 'class_number': number of classes. Only available for classification problems.
  177. 'all_degree_entropy': the entropy of degree distribution of each graph.
  178. 'ave_degree_entropy': the average entropy of degree distribution of all graphs.
  179. All informations above will be returned if `keys` is not given.
  180. params: dict of dict, optional
  181. A dictinary which contains extra parameters for each possible
  182. element in ``keys``.
  183. Return
  184. ------
  185. dict
  186. Information of the graph dataset keyed by `keys`.
  187. """
  188. infos = {}
  189. if keys == None:
  190. keys = [
  191. 'substructures',
  192. 'node_label_dim',
  193. 'edge_label_dim',
  194. 'directed',
  195. 'dataset_size',
  196. 'total_node_num',
  197. 'ave_node_num',
  198. 'min_node_num',
  199. 'max_node_num',
  200. 'total_edge_num',
  201. 'ave_edge_num',
  202. 'min_edge_num',
  203. 'max_edge_num',
  204. 'ave_node_degree',
  205. 'min_node_degree',
  206. 'max_node_degree',
  207. 'ave_fill_factor',
  208. 'min_fill_factor',
  209. 'max_fill_factor',
  210. 'node_label_nums',
  211. 'edge_label_nums',
  212. 'node_attr_dim',
  213. 'edge_attr_dim',
  214. 'class_number',
  215. 'all_degree_entropy',
  216. 'ave_degree_entropy'
  217. ]
  218. # dataset size
  219. if 'dataset_size' in keys:
  220. if self._dataset_size is None:
  221. self._dataset_size = self._get_dataset_size()
  222. infos['dataset_size'] = self._dataset_size
  223. # graph node number
  224. if any(i in keys for i in ['total_node_num', 'ave_node_num', 'min_node_num', 'max_node_num']):
  225. all_node_nums = self._get_all_node_nums()
  226. if 'total_node_num' in keys:
  227. if self._total_node_num is None:
  228. self._total_node_num = self._get_total_node_num(all_node_nums)
  229. infos['total_node_num'] = self._total_node_num
  230. if 'ave_node_num' in keys:
  231. if self._ave_node_num is None:
  232. self._ave_node_num = self._get_ave_node_num(all_node_nums)
  233. infos['ave_node_num'] = self._ave_node_num
  234. if 'min_node_num' in keys:
  235. if self._min_node_num is None:
  236. self._min_node_num = self._get_min_node_num(all_node_nums)
  237. infos['min_node_num'] = self._min_node_num
  238. if 'max_node_num' in keys:
  239. if self._max_node_num is None:
  240. self._max_node_num = self._get_max_node_num(all_node_nums)
  241. infos['max_node_num'] = self._max_node_num
  242. # graph edge number
  243. if any(i in keys for i in ['total_edge_num', 'ave_edge_num', 'min_edge_num', 'max_edge_num']):
  244. all_edge_nums = self._get_all_edge_nums()
  245. if 'total_edge_num' in keys:
  246. if self._total_edge_num is None:
  247. self._total_edge_num = self._get_total_edge_num(all_edge_nums)
  248. infos['total_edge_num'] = self._total_edge_num
  249. if 'ave_edge_num' in keys:
  250. if self._ave_edge_num is None:
  251. self._ave_edge_num = self._get_ave_edge_num(all_edge_nums)
  252. infos['ave_edge_num'] = self._ave_edge_num
  253. if 'max_edge_num' in keys:
  254. if self._max_edge_num is None:
  255. self._max_edge_num = self._get_max_edge_num(all_edge_nums)
  256. infos['max_edge_num'] = self._max_edge_num
  257. if 'min_edge_num' in keys:
  258. if self._min_edge_num is None:
  259. self._min_edge_num = self._get_min_edge_num(all_edge_nums)
  260. infos['min_edge_num'] = self._min_edge_num
  261. # label number
  262. if 'node_label_dim' in keys:
  263. if self._node_label_dim is None:
  264. self._node_label_dim = self._get_node_label_dim()
  265. infos['node_label_dim'] = self._node_label_dim
  266. if 'node_label_nums' in keys:
  267. if self._node_label_nums is None:
  268. self._node_label_nums = {}
  269. for node_label in self._node_labels:
  270. self._node_label_nums[node_label] = self._get_node_label_num(node_label)
  271. infos['node_label_nums'] = self._node_label_nums
  272. if 'edge_label_dim' in keys:
  273. if self._edge_label_dim is None:
  274. self._edge_label_dim = self._get_edge_label_dim()
  275. infos['edge_label_dim'] = self._edge_label_dim
  276. if 'edge_label_nums' in keys:
  277. if self._edge_label_nums is None:
  278. self._edge_label_nums = {}
  279. for edge_label in self._edge_labels:
  280. self._edge_label_nums[edge_label] = self._get_edge_label_num(edge_label)
  281. infos['edge_label_nums'] = self._edge_label_nums
  282. if 'directed' in keys or 'substructures' in keys:
  283. if self._directed is None:
  284. self._directed = self._is_directed()
  285. infos['directed'] = self._directed
  286. # node degree
  287. if any(i in keys for i in ['ave_node_degree', 'max_node_degree', 'min_node_degree']):
  288. all_node_degrees = self._get_all_node_degrees()
  289. if 'ave_node_degree' in keys:
  290. if self._ave_node_degree is None:
  291. self._ave_node_degree = self._get_ave_node_degree(all_node_degrees)
  292. infos['ave_node_degree'] = self._ave_node_degree
  293. if 'max_node_degree' in keys:
  294. if self._max_node_degree is None:
  295. self._max_node_degree = self._get_max_node_degree(all_node_degrees)
  296. infos['max_node_degree'] = self._max_node_degree
  297. if 'min_node_degree' in keys:
  298. if self._min_node_degree is None:
  299. self._min_node_degree = self._get_min_node_degree(all_node_degrees)
  300. infos['min_node_degree'] = self._min_node_degree
  301. # fill factor
  302. if any(i in keys for i in ['ave_fill_factor', 'max_fill_factor', 'min_fill_factor']):
  303. all_fill_factors = self._get_all_fill_factors()
  304. if 'ave_fill_factor' in keys:
  305. if self._ave_fill_factor is None:
  306. self._ave_fill_factor = self._get_ave_fill_factor(all_fill_factors)
  307. infos['ave_fill_factor'] = self._ave_fill_factor
  308. if 'max_fill_factor' in keys:
  309. if self._max_fill_factor is None:
  310. self._max_fill_factor = self._get_max_fill_factor(all_fill_factors)
  311. infos['max_fill_factor'] = self._max_fill_factor
  312. if 'min_fill_factor' in keys:
  313. if self._min_fill_factor is None:
  314. self._min_fill_factor = self._get_min_fill_factor(all_fill_factors)
  315. infos['min_fill_factor'] = self._min_fill_factor
  316. if 'substructures' in keys:
  317. if self._substructures is None:
  318. self._substructures = self._get_substructures()
  319. infos['substructures'] = self._substructures
  320. if 'class_number' in keys:
  321. if self._class_number is None:
  322. self._class_number = self._get_class_number()
  323. infos['class_number'] = self._class_number
  324. if 'node_attr_dim' in keys:
  325. if self._node_attr_dim is None:
  326. self._node_attr_dim = self._get_node_attr_dim()
  327. infos['node_attr_dim'] = self._node_attr_dim
  328. if 'edge_attr_dim' in keys:
  329. if self._edge_attr_dim is None:
  330. self._edge_attr_dim = self._get_edge_attr_dim()
  331. infos['edge_attr_dim'] = self._edge_attr_dim
  332. # entropy of degree distribution.
  333. if 'all_degree_entropy' in keys:
  334. if params is not None and ('all_degree_entropy' in params) and ('base' in params['all_degree_entropy']):
  335. base = params['all_degree_entropy']['base']
  336. else:
  337. base = None
  338. infos['all_degree_entropy'] = self._compute_all_degree_entropy(base=base)
  339. if 'ave_degree_entropy' in keys:
  340. if params is not None and ('ave_degree_entropy' in params) and ('base' in params['ave_degree_entropy']):
  341. base = params['ave_degree_entropy']['base']
  342. else:
  343. base = None
  344. infos['ave_degree_entropy'] = np.mean(self._compute_all_degree_entropy(base=base))
  345. return infos
  346. def print_graph_infos(self, infos):
  347. from collections import OrderedDict
  348. keys = list(infos.keys())
  349. print(OrderedDict(sorted(infos.items(), key=lambda i: keys.index(i[0]))))
  350. def remove_labels(self, node_labels=[], edge_labels=[], node_attrs=[], edge_attrs=[]):
  351. node_labels = [item for item in node_labels if item in self._node_labels]
  352. edge_labels = [item for item in edge_labels if item in self._edge_labels]
  353. node_attrs = [item for item in node_attrs if item in self._node_attrs]
  354. edge_attrs = [item for item in edge_attrs if item in self._edge_attrs]
  355. for g in self._graphs:
  356. for nd in g.nodes():
  357. for nl in node_labels:
  358. del g.nodes[nd][nl]
  359. for na in node_attrs:
  360. del g.nodes[nd][na]
  361. for ed in g.edges():
  362. for el in edge_labels:
  363. del g.edges[ed][el]
  364. for ea in edge_attrs:
  365. del g.edges[ed][ea]
  366. if len(node_labels) > 0:
  367. self._node_labels = [nl for nl in self._node_labels if nl not in node_labels]
  368. if len(edge_labels) > 0:
  369. self._edge_labels = [el for el in self._edge_labels if el not in edge_labels]
  370. if len(node_attrs) > 0:
  371. self._node_attrs = [na for na in self._node_attrs if na not in node_attrs]
  372. if len(edge_attrs) > 0:
  373. self._edge_attrs = [ea for ea in self._edge_attrs if ea not in edge_attrs]
  374. def clean_labels(self):
  375. labels = []
  376. for name in self._node_labels:
  377. label = set()
  378. for G in self._graphs:
  379. label = label | set(nx.get_node_attributes(G, name).values())
  380. if len(label) > 1:
  381. labels.append(name)
  382. break
  383. if len(label) < 2:
  384. for G in self._graphs:
  385. for nd in G.nodes():
  386. del G.nodes[nd][name]
  387. self._node_labels = labels
  388. labels = []
  389. for name in self._edge_labels:
  390. label = set()
  391. for G in self._graphs:
  392. label = label | set(nx.get_edge_attributes(G, name).values())
  393. if len(label) > 1:
  394. labels.append(name)
  395. break
  396. if len(label) < 2:
  397. for G in self._graphs:
  398. for ed in G.edges():
  399. del G.edges[ed][name]
  400. self._edge_labels = labels
  401. labels = []
  402. for name in self._node_attrs:
  403. label = set()
  404. for G in self._graphs:
  405. label = label | set(nx.get_node_attributes(G, name).values())
  406. if len(label) > 1:
  407. labels.append(name)
  408. break
  409. if len(label) < 2:
  410. for G in self._graphs:
  411. for nd in G.nodes():
  412. del G.nodes[nd][name]
  413. self._node_attrs = labels
  414. labels = []
  415. for name in self._edge_attrs:
  416. label = set()
  417. for G in self._graphs:
  418. label = label | set(nx.get_edge_attributes(G, name).values())
  419. if len(label) > 1:
  420. labels.append(name)
  421. break
  422. if len(label) < 2:
  423. for G in self._graphs:
  424. for ed in G.edges():
  425. del G.edges[ed][name]
  426. self._edge_attrs = labels
  427. def cut_graphs(self, range_):
  428. self._graphs = [self._graphs[i] for i in range_]
  429. if self._targets is not None:
  430. self._targets = [self._targets[i] for i in range_]
  431. self.clean_labels()
  432. def trim_dataset(self, edge_required=False):
  433. if edge_required:
  434. trimed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if (nx.number_of_nodes(g) != 0 and nx.number_of_edges(g) != 0)]
  435. else:
  436. trimed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if nx.number_of_nodes(g) != 0]
  437. idx = [p[0] for p in trimed_pairs]
  438. self._graphs = [p[1] for p in trimed_pairs]
  439. self._targets = [self._targets[i] for i in idx]
  440. self.clean_labels()
  441. def copy(self):
  442. dataset = Dataset()
  443. graphs = [g.copy() for g in self._graphs] if self._graphs is not None else None
  444. target = self._targets.copy() if self._targets is not None else None
  445. node_labels = self._node_labels.copy() if self._node_labels is not None else None
  446. node_attrs = self._node_attrs.copy() if self._node_attrs is not None else None
  447. edge_labels = self._edge_labels.copy() if self._edge_labels is not None else None
  448. edge_attrs = self._edge_attrs.copy() if self._edge_attrs is not None else None
  449. dataset.load_graphs(graphs, target)
  450. dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  451. # @todo: clean_labels and add other class members?
  452. return dataset
  453. def is_special_dataset(self, inputs):
  454. if inputs.endswith('_unlabeled'):
  455. return True
  456. if inputs == 'MAO_lite':
  457. return True
  458. if inputs == 'Monoterpens':
  459. return True
  460. return False
  461. def load_special_dataset(self, inputs, root, clean_labels, reload, verbose):
  462. if inputs.endswith('_unlabeled'):
  463. self.load_predefined_dataset(inputs[:len(inputs) - 10], root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  464. self.remove_labels(node_labels=self._node_labels,
  465. edge_labels=self._edge_labels,
  466. node_attrs=self._node_attrs,
  467. edge_attrs=self._edge_attrs)
  468. elif inputs == 'MAO_lite':
  469. self.load_predefined_dataset(inputs[:len(inputs) - 5], root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  470. self.remove_labels(edge_labels=['bond_stereo'], node_attrs=['x', 'y'])
  471. elif inputs == 'Monoterpens':
  472. self.load_predefined_dataset('Monoterpenoides', root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  473. def get_all_node_labels(self):
  474. node_labels = []
  475. for g in self._graphs:
  476. for n in g.nodes():
  477. nl = tuple(g.nodes[n].items())
  478. if nl not in node_labels:
  479. node_labels.append(nl)
  480. return node_labels
  481. def get_all_edge_labels(self):
  482. edge_labels = []
  483. for g in self._graphs:
  484. for e in g.edges():
  485. el = tuple(g.edges[e].items())
  486. if el not in edge_labels:
  487. edge_labels.append(el)
  488. return edge_labels
  489. def _get_dataset_size(self):
  490. return len(self._graphs)
  491. def _get_all_node_nums(self):
  492. return [nx.number_of_nodes(G) for G in self._graphs]
  493. def _get_total_node_nums(self, all_node_nums):
  494. return np.sum(all_node_nums)
  495. def _get_ave_node_num(self, all_node_nums):
  496. return np.mean(all_node_nums)
  497. def _get_min_node_num(self, all_node_nums):
  498. return np.amin(all_node_nums)
  499. def _get_max_node_num(self, all_node_nums):
  500. return np.amax(all_node_nums)
  501. def _get_all_edge_nums(self):
  502. return [nx.number_of_edges(G) for G in self._graphs]
  503. def _get_total_edge_nums(self, all_edge_nums):
  504. return np.sum(all_edge_nums)
  505. def _get_ave_edge_num(self, all_edge_nums):
  506. return np.mean(all_edge_nums)
  507. def _get_min_edge_num(self, all_edge_nums):
  508. return np.amin(all_edge_nums)
  509. def _get_max_edge_num(self, all_edge_nums):
  510. return np.amax(all_edge_nums)
  511. def _get_node_label_dim(self):
  512. return len(self._node_labels)
  513. def _get_node_label_num(self, node_label):
  514. nl = set()
  515. for G in self._graphs:
  516. nl = nl | set(nx.get_node_attributes(G, node_label).values())
  517. return len(nl)
  518. def _get_edge_label_dim(self):
  519. return len(self._edge_labels)
  520. def _get_edge_label_num(self, edge_label):
  521. el = set()
  522. for G in self._graphs:
  523. el = el | set(nx.get_edge_attributes(G, edge_label).values())
  524. return len(el)
  525. def _is_directed(self):
  526. return nx.is_directed(self._graphs[0])
  527. def _get_all_node_degrees(self):
  528. return [np.mean(list(dict(G.degree()).values())) for G in self._graphs]
  529. def _get_ave_node_degree(self, all_node_degrees):
  530. return np.mean(all_node_degrees)
  531. def _get_max_node_degree(self, all_node_degrees):
  532. return np.amax(all_node_degrees)
  533. def _get_min_node_degree(self, all_node_degrees):
  534. return np.amin(all_node_degrees)
  535. def _get_all_fill_factors(self):
  536. """Get fill factor, the number of non-zero entries in the adjacency matrix.
  537. Returns
  538. -------
  539. list[float]
  540. List of fill factors for all graphs.
  541. """
  542. return [nx.number_of_edges(G) / (nx.number_of_nodes(G) ** 2) for G in self._graphs]
  543. def _get_ave_fill_factor(self, all_fill_factors):
  544. return np.mean(all_fill_factors)
  545. def _get_max_fill_factor(self, all_fill_factors):
  546. return np.amax(all_fill_factors)
  547. def _get_min_fill_factor(self, all_fill_factors):
  548. return np.amin(all_fill_factors)
  549. def _get_substructures(self):
  550. subs = set()
  551. for G in self._graphs:
  552. degrees = list(dict(G.degree()).values())
  553. if any(i == 2 for i in degrees):
  554. subs.add('linear')
  555. if np.amax(degrees) >= 3:
  556. subs.add('non linear')
  557. if 'linear' in subs and 'non linear' in subs:
  558. break
  559. if self._directed:
  560. for G in self._graphs:
  561. if len(list(nx.find_cycle(G))) > 0:
  562. subs.add('cyclic')
  563. break
  564. # else:
  565. # # @todo: this method does not work for big graph with large amount of edges like D&D, try a better way.
  566. # upper = np.amin([nx.number_of_edges(G) for G in Gn]) * 2 + 10
  567. # for G in Gn:
  568. # if (nx.number_of_edges(G) < upper):
  569. # cyc = list(nx.simple_cycles(G.to_directed()))
  570. # if any(len(i) > 2 for i in cyc):
  571. # subs.add('cyclic')
  572. # break
  573. # if 'cyclic' not in subs:
  574. # for G in Gn:
  575. # cyc = list(nx.simple_cycles(G.to_directed()))
  576. # if any(len(i) > 2 for i in cyc):
  577. # subs.add('cyclic')
  578. # break
  579. return subs
  580. def _get_class_num(self):
  581. return len(set(self._targets))
  582. def _get_node_attr_dim(self):
  583. return len(self._node_attrs)
  584. def _get_edge_attr_dim(self):
  585. return len(self._edge_attrs)
  586. def _compute_all_degree_entropy(self, base=None):
  587. """Compute the entropy of degree distribution of each graph.
  588. Parameters
  589. ----------
  590. base : float, optional
  591. The logarithmic base to use. The default is ``e`` (natural logarithm).
  592. Returns
  593. -------
  594. degree_entropy : float
  595. The calculated entropy.
  596. """
  597. from gklearn.utils.stats import entropy
  598. degree_entropy = []
  599. for g in self._graphs:
  600. degrees = list(dict(g.degree()).values())
  601. en = entropy(degrees, base=base)
  602. degree_entropy.append(en)
  603. return degree_entropy
  604. @property
  605. def graphs(self):
  606. return self._graphs
  607. @property
  608. def targets(self):
  609. return self._targets
  610. @property
  611. def node_labels(self):
  612. return self._node_labels
  613. @property
  614. def edge_labels(self):
  615. return self._edge_labels
  616. @property
  617. def node_attrs(self):
  618. return self._node_attrs
  619. @property
  620. def edge_attrs(self):
  621. return self._edge_attrs
  622. def split_dataset_by_target(dataset):
  623. from gklearn.preimage.utils import get_same_item_indices
  624. graphs = dataset.graphs
  625. targets = dataset.targets
  626. datasets = []
  627. idx_targets = get_same_item_indices(targets)
  628. for key, val in idx_targets.items():
  629. sub_graphs = [graphs[i] for i in val]
  630. sub_dataset = Dataset()
  631. sub_dataset.load_graphs(sub_graphs, [key] * len(val))
  632. node_labels = dataset.node_labels.copy() if dataset.node_labels is not None else None
  633. node_attrs = dataset.node_attrs.copy() if dataset.node_attrs is not None else None
  634. edge_labels = dataset.edge_labels.copy() if dataset.edge_labels is not None else None
  635. edge_attrs = dataset.edge_attrs.copy() if dataset.edge_attrs is not None else None
  636. sub_dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  637. datasets.append(sub_dataset)
  638. # @todo: clean_labels?
  639. return datasets

A Python package for graph kernels, graph edit distances and graph pre-image problem.