You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

dataset.py 26 kB

4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837
  1. #!/usr/bin/env python3
  2. # -*- coding: utf-8 -*-
  3. """
  4. Created on Thu Mar 26 18:48:27 2020
  5. @author: ljia
  6. """
  7. import numpy as np
  8. import networkx as nx
  9. import os
  10. from gklearn.dataset import DATASET_META, DataFetcher, DataLoader
  11. class Dataset(object):
  12. def __init__(self, inputs=None, root='datasets', filename_targets=None, targets=None, mode='networkx', clean_labels=True, reload=False, verbose=False, **kwargs):
  13. self._substructures = None
  14. self._node_label_dim = None
  15. self._edge_label_dim = None
  16. self._directed = None
  17. self._dataset_size = None
  18. self._total_node_num = None
  19. self._ave_node_num = None
  20. self._min_node_num = None
  21. self._max_node_num = None
  22. self._total_edge_num = None
  23. self._ave_edge_num = None
  24. self._min_edge_num = None
  25. self._max_edge_num = None
  26. self._ave_node_degree = None
  27. self._min_node_degree = None
  28. self._max_node_degree = None
  29. self._ave_fill_factor = None
  30. self._min_fill_factor = None
  31. self._max_fill_factor = None
  32. self._node_label_nums = None
  33. self._edge_label_nums = None
  34. self._node_attr_dim = None
  35. self._edge_attr_dim = None
  36. self._class_number = None
  37. self._ds_name = None
  38. if inputs is None:
  39. self._graphs = None
  40. self._targets = None
  41. self._node_labels = None
  42. self._edge_labels = None
  43. self._node_attrs = None
  44. self._edge_attrs = None
  45. # If inputs is a list of graphs.
  46. elif isinstance(inputs, list):
  47. node_labels = kwargs.get('node_labels', None)
  48. node_attrs = kwargs.get('node_attrs', None)
  49. edge_labels = kwargs.get('edge_labels', None)
  50. edge_attrs = kwargs.get('edge_attrs', None)
  51. self.load_graphs(inputs, targets=targets)
  52. self.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  53. if clean_labels:
  54. self.clean_labels()
  55. elif isinstance(inputs, str):
  56. # If inputs is predefined dataset name.
  57. if inputs in DATASET_META:
  58. self.load_predefined_dataset(inputs, root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  59. self._ds_name = inputs
  60. # If the dataset is specially defined, i.g., Alkane_unlabeled, MAO_lite.
  61. elif self.is_special_dataset(inputs):
  62. self.load_special_dataset(inputs, root, clean_labels, reload, verbose)
  63. self._ds_name = inputs
  64. # If inputs is a file name.
  65. elif os.path.isfile(inputs):
  66. self.load_dataset(inputs, filename_targets=filename_targets, clean_labels=clean_labels, **kwargs)
  67. # If inputs is a file name.
  68. else:
  69. raise ValueError('The "inputs" argument "' + inputs + '" is not a valid dataset name or file name.')
  70. else:
  71. raise TypeError('The "inputs" argument cannot be recognized. "Inputs" can be a list of graphs, a predefined dataset name, or a file name of a dataset.')
  72. def load_dataset(self, filename, filename_targets=None, clean_labels=True, **kwargs):
  73. self._graphs, self._targets, label_names = DataLoader(filename, filename_targets=filename_targets, **kwargs).data
  74. self._node_labels = label_names['node_labels']
  75. self._node_attrs = label_names['node_attrs']
  76. self._edge_labels = label_names['edge_labels']
  77. self._edge_attrs = label_names['edge_attrs']
  78. if clean_labels:
  79. self.clean_labels()
  80. def load_graphs(self, graphs, targets=None):
  81. # this has to be followed by set_labels().
  82. self._graphs = graphs
  83. self._targets = targets
  84. # self.set_labels_attrs() # @todo
  85. def load_predefined_dataset(self, ds_name, root='datasets', clean_labels=True, reload=False, verbose=False):
  86. path = DataFetcher(name=ds_name, root=root, reload=reload, verbose=verbose).path
  87. if DATASET_META[ds_name]['database'] == 'tudataset':
  88. ds_file = os.path.join(path, ds_name + '_A.txt')
  89. fn_targets = None
  90. else:
  91. load_files = DATASET_META[ds_name]['load_files']
  92. if isinstance(load_files[0], str):
  93. ds_file = os.path.join(path, load_files[0])
  94. else: # load_files[0] is a list of files.
  95. ds_file = [os.path.join(path, fn) for fn in load_files[0]]
  96. fn_targets = os.path.join(path, load_files[1]) if len(load_files) == 2 else None
  97. if 'extra_params' in DATASET_META[ds_name]:
  98. kwargs = DATASET_META[ds_name]['extra_params']
  99. self._graphs, self._targets, label_names = DataLoader(ds_file, filename_targets=fn_targets, **kwargs).data
  100. self._node_labels = label_names['node_labels']
  101. self._node_attrs = label_names['node_attrs']
  102. self._edge_labels = label_names['edge_labels']
  103. self._edge_attrs = label_names['edge_attrs']
  104. if clean_labels:
  105. self.clean_labels()
  106. # Deal with specific datasets.
  107. if ds_name == 'Alkane':
  108. self.trim_dataset(edge_required=True)
  109. self.remove_labels(node_labels=['atom_symbol'])
  110. def set_labels(self, node_labels=[], node_attrs=[], edge_labels=[], edge_attrs=[]):
  111. self._node_labels = node_labels
  112. self._node_attrs = node_attrs
  113. self._edge_labels = edge_labels
  114. self._edge_attrs = edge_attrs
  115. def set_labels_attrs(self, node_labels=None, node_attrs=None, edge_labels=None, edge_attrs=None):
  116. # @todo: remove labels which have only one possible values.
  117. if node_labels is None:
  118. self._node_labels = self._graphs[0].graph['node_labels']
  119. # # graphs are considered node unlabeled if all nodes have the same label.
  120. # infos.update({'node_labeled': is_nl if node_label_num > 1 else False})
  121. if node_attrs is None:
  122. self._node_attrs = self._graphs[0].graph['node_attrs']
  123. # for G in Gn:
  124. # for n in G.nodes(data=True):
  125. # if 'attributes' in n[1]:
  126. # return len(n[1]['attributes'])
  127. # return 0
  128. if edge_labels is None:
  129. self._edge_labels = self._graphs[0].graph['edge_labels']
  130. # # graphs are considered edge unlabeled if all edges have the same label.
  131. # infos.update({'edge_labeled': is_el if edge_label_num > 1 else False})
  132. if edge_attrs is None:
  133. self._edge_attrs = self._graphs[0].graph['edge_attrs']
  134. # for G in Gn:
  135. # if nx.number_of_edges(G) > 0:
  136. # for e in G.edges(data=True):
  137. # if 'attributes' in e[2]:
  138. # return len(e[2]['attributes'])
  139. # return 0
  140. def get_dataset_infos(self, keys=None, params=None):
  141. """Computes and returns the structure and property information of the graph dataset.
  142. Parameters
  143. ----------
  144. keys : list, optional
  145. A list of strings which indicate which informations will be returned. The
  146. possible choices includes:
  147. 'substructures': sub-structures graphs contains, including 'linear', 'non
  148. linear' and 'cyclic'.
  149. 'node_label_dim': whether vertices have symbolic labels.
  150. 'edge_label_dim': whether egdes have symbolic labels.
  151. 'directed': whether graphs in dataset are directed.
  152. 'dataset_size': number of graphs in dataset.
  153. 'total_node_num': total number of vertices of all graphs in dataset.
  154. 'ave_node_num': average number of vertices of graphs in dataset.
  155. 'min_node_num': minimum number of vertices of graphs in dataset.
  156. 'max_node_num': maximum number of vertices of graphs in dataset.
  157. 'total_edge_num': total number of edges of all graphs in dataset.
  158. 'ave_edge_num': average number of edges of graphs in dataset.
  159. 'min_edge_num': minimum number of edges of graphs in dataset.
  160. 'max_edge_num': maximum number of edges of graphs in dataset.
  161. 'ave_node_degree': average vertex degree of graphs in dataset.
  162. 'min_node_degree': minimum vertex degree of graphs in dataset.
  163. 'max_node_degree': maximum vertex degree of graphs in dataset.
  164. 'ave_fill_factor': average fill factor (number_of_edges /
  165. (number_of_nodes ** 2)) of graphs in dataset.
  166. 'min_fill_factor': minimum fill factor of graphs in dataset.
  167. 'max_fill_factor': maximum fill factor of graphs in dataset.
  168. 'node_label_nums': list of numbers of symbolic vertex labels of graphs in dataset.
  169. 'edge_label_nums': list number of symbolic edge labels of graphs in dataset.
  170. 'node_attr_dim': number of dimensions of non-symbolic vertex labels.
  171. Extracted from the 'attributes' attribute of graph nodes.
  172. 'edge_attr_dim': number of dimensions of non-symbolic edge labels.
  173. Extracted from the 'attributes' attribute of graph edges.
  174. 'class_number': number of classes. Only available for classification problems.
  175. 'all_degree_entropy': the entropy of degree distribution of each graph.
  176. 'ave_degree_entropy': the average entropy of degree distribution of all graphs.
  177. All informations above will be returned if `keys` is not given.
  178. params: dict of dict, optional
  179. A dictinary which contains extra parameters for each possible
  180. element in ``keys``.
  181. Return
  182. ------
  183. dict
  184. Information of the graph dataset keyed by `keys`.
  185. """
  186. infos = {}
  187. if keys == None:
  188. keys = [
  189. 'substructures',
  190. 'node_label_dim',
  191. 'edge_label_dim',
  192. 'directed',
  193. 'dataset_size',
  194. 'total_node_num',
  195. 'ave_node_num',
  196. 'min_node_num',
  197. 'max_node_num',
  198. 'total_edge_num',
  199. 'ave_edge_num',
  200. 'min_edge_num',
  201. 'max_edge_num',
  202. 'ave_node_degree',
  203. 'min_node_degree',
  204. 'max_node_degree',
  205. 'ave_fill_factor',
  206. 'min_fill_factor',
  207. 'max_fill_factor',
  208. 'node_label_nums',
  209. 'edge_label_nums',
  210. 'node_attr_dim',
  211. 'edge_attr_dim',
  212. 'class_number',
  213. 'all_degree_entropy',
  214. 'ave_degree_entropy'
  215. ]
  216. # dataset size
  217. if 'dataset_size' in keys:
  218. if self._dataset_size is None:
  219. self._dataset_size = self._get_dataset_size()
  220. infos['dataset_size'] = self._dataset_size
  221. # graph node number
  222. if any(i in keys for i in ['total_node_num', 'ave_node_num', 'min_node_num', 'max_node_num']):
  223. all_node_nums = self._get_all_node_nums()
  224. if 'total_node_num' in keys:
  225. if self._total_node_num is None:
  226. self._total_node_num = self._get_total_node_num(all_node_nums)
  227. infos['total_node_num'] = self._total_node_num
  228. if 'ave_node_num' in keys:
  229. if self._ave_node_num is None:
  230. self._ave_node_num = self._get_ave_node_num(all_node_nums)
  231. infos['ave_node_num'] = self._ave_node_num
  232. if 'min_node_num' in keys:
  233. if self._min_node_num is None:
  234. self._min_node_num = self._get_min_node_num(all_node_nums)
  235. infos['min_node_num'] = self._min_node_num
  236. if 'max_node_num' in keys:
  237. if self._max_node_num is None:
  238. self._max_node_num = self._get_max_node_num(all_node_nums)
  239. infos['max_node_num'] = self._max_node_num
  240. # graph edge number
  241. if any(i in keys for i in ['total_edge_num', 'ave_edge_num', 'min_edge_num', 'max_edge_num']):
  242. all_edge_nums = self._get_all_edge_nums()
  243. if 'total_edge_num' in keys:
  244. if self._total_edge_num is None:
  245. self._total_edge_num = self._get_total_edge_num(all_edge_nums)
  246. infos['total_edge_num'] = self._total_edge_num
  247. if 'ave_edge_num' in keys:
  248. if self._ave_edge_num is None:
  249. self._ave_edge_num = self._get_ave_edge_num(all_edge_nums)
  250. infos['ave_edge_num'] = self._ave_edge_num
  251. if 'max_edge_num' in keys:
  252. if self._max_edge_num is None:
  253. self._max_edge_num = self._get_max_edge_num(all_edge_nums)
  254. infos['max_edge_num'] = self._max_edge_num
  255. if 'min_edge_num' in keys:
  256. if self._min_edge_num is None:
  257. self._min_edge_num = self._get_min_edge_num(all_edge_nums)
  258. infos['min_edge_num'] = self._min_edge_num
  259. # label number
  260. if 'node_label_dim' in keys:
  261. if self._node_label_dim is None:
  262. self._node_label_dim = self._get_node_label_dim()
  263. infos['node_label_dim'] = self._node_label_dim
  264. if 'node_label_nums' in keys:
  265. if self._node_label_nums is None:
  266. self._node_label_nums = {}
  267. for node_label in self._node_labels:
  268. self._node_label_nums[node_label] = self._get_node_label_num(node_label)
  269. infos['node_label_nums'] = self._node_label_nums
  270. if 'edge_label_dim' in keys:
  271. if self._edge_label_dim is None:
  272. self._edge_label_dim = self._get_edge_label_dim()
  273. infos['edge_label_dim'] = self._edge_label_dim
  274. if 'edge_label_nums' in keys:
  275. if self._edge_label_nums is None:
  276. self._edge_label_nums = {}
  277. for edge_label in self._edge_labels:
  278. self._edge_label_nums[edge_label] = self._get_edge_label_num(edge_label)
  279. infos['edge_label_nums'] = self._edge_label_nums
  280. if 'directed' in keys or 'substructures' in keys:
  281. if self._directed is None:
  282. self._directed = self._is_directed()
  283. infos['directed'] = self._directed
  284. # node degree
  285. if any(i in keys for i in ['ave_node_degree', 'max_node_degree', 'min_node_degree']):
  286. all_node_degrees = self._get_all_node_degrees()
  287. if 'ave_node_degree' in keys:
  288. if self._ave_node_degree is None:
  289. self._ave_node_degree = self._get_ave_node_degree(all_node_degrees)
  290. infos['ave_node_degree'] = self._ave_node_degree
  291. if 'max_node_degree' in keys:
  292. if self._max_node_degree is None:
  293. self._max_node_degree = self._get_max_node_degree(all_node_degrees)
  294. infos['max_node_degree'] = self._max_node_degree
  295. if 'min_node_degree' in keys:
  296. if self._min_node_degree is None:
  297. self._min_node_degree = self._get_min_node_degree(all_node_degrees)
  298. infos['min_node_degree'] = self._min_node_degree
  299. # fill factor
  300. if any(i in keys for i in ['ave_fill_factor', 'max_fill_factor', 'min_fill_factor']):
  301. all_fill_factors = self._get_all_fill_factors()
  302. if 'ave_fill_factor' in keys:
  303. if self._ave_fill_factor is None:
  304. self._ave_fill_factor = self._get_ave_fill_factor(all_fill_factors)
  305. infos['ave_fill_factor'] = self._ave_fill_factor
  306. if 'max_fill_factor' in keys:
  307. if self._max_fill_factor is None:
  308. self._max_fill_factor = self._get_max_fill_factor(all_fill_factors)
  309. infos['max_fill_factor'] = self._max_fill_factor
  310. if 'min_fill_factor' in keys:
  311. if self._min_fill_factor is None:
  312. self._min_fill_factor = self._get_min_fill_factor(all_fill_factors)
  313. infos['min_fill_factor'] = self._min_fill_factor
  314. if 'substructures' in keys:
  315. if self._substructures is None:
  316. self._substructures = self._get_substructures()
  317. infos['substructures'] = self._substructures
  318. if 'class_number' in keys:
  319. if self._class_number is None:
  320. self._class_number = self._get_class_number()
  321. infos['class_number'] = self._class_number
  322. if 'node_attr_dim' in keys:
  323. if self._node_attr_dim is None:
  324. self._node_attr_dim = self._get_node_attr_dim()
  325. infos['node_attr_dim'] = self._node_attr_dim
  326. if 'edge_attr_dim' in keys:
  327. if self._edge_attr_dim is None:
  328. self._edge_attr_dim = self._get_edge_attr_dim()
  329. infos['edge_attr_dim'] = self._edge_attr_dim
  330. # entropy of degree distribution.
  331. if 'all_degree_entropy' in keys:
  332. if params is not None and ('all_degree_entropy' in params) and ('base' in params['all_degree_entropy']):
  333. base = params['all_degree_entropy']['base']
  334. else:
  335. base = None
  336. infos['all_degree_entropy'] = self._compute_all_degree_entropy(base=base)
  337. if 'ave_degree_entropy' in keys:
  338. if params is not None and ('ave_degree_entropy' in params) and ('base' in params['ave_degree_entropy']):
  339. base = params['ave_degree_entropy']['base']
  340. else:
  341. base = None
  342. infos['ave_degree_entropy'] = np.mean(self._compute_all_degree_entropy(base=base))
  343. return infos
  344. def print_graph_infos(self, infos):
  345. from collections import OrderedDict
  346. keys = list(infos.keys())
  347. print(OrderedDict(sorted(infos.items(), key=lambda i: keys.index(i[0]))))
  348. def remove_labels(self, node_labels=[], edge_labels=[], node_attrs=[], edge_attrs=[]):
  349. node_labels = [item for item in node_labels if item in self._node_labels]
  350. edge_labels = [item for item in edge_labels if item in self._edge_labels]
  351. node_attrs = [item for item in node_attrs if item in self._node_attrs]
  352. edge_attrs = [item for item in edge_attrs if item in self._edge_attrs]
  353. for g in self._graphs:
  354. for nd in g.nodes():
  355. for nl in node_labels:
  356. del g.nodes[nd][nl]
  357. for na in node_attrs:
  358. del g.nodes[nd][na]
  359. for ed in g.edges():
  360. for el in edge_labels:
  361. del g.edges[ed][el]
  362. for ea in edge_attrs:
  363. del g.edges[ed][ea]
  364. if len(node_labels) > 0:
  365. self._node_labels = [nl for nl in self._node_labels if nl not in node_labels]
  366. if len(edge_labels) > 0:
  367. self._edge_labels = [el for el in self._edge_labels if el not in edge_labels]
  368. if len(node_attrs) > 0:
  369. self._node_attrs = [na for na in self._node_attrs if na not in node_attrs]
  370. if len(edge_attrs) > 0:
  371. self._edge_attrs = [ea for ea in self._edge_attrs if ea not in edge_attrs]
  372. def clean_labels(self):
  373. labels = []
  374. for name in self._node_labels:
  375. label = set()
  376. for G in self._graphs:
  377. label = label | set(nx.get_node_attributes(G, name).values())
  378. if len(label) > 1:
  379. labels.append(name)
  380. break
  381. if len(label) < 2:
  382. for G in self._graphs:
  383. for nd in G.nodes():
  384. del G.nodes[nd][name]
  385. self._node_labels = labels
  386. labels = []
  387. for name in self._edge_labels:
  388. label = set()
  389. for G in self._graphs:
  390. label = label | set(nx.get_edge_attributes(G, name).values())
  391. if len(label) > 1:
  392. labels.append(name)
  393. break
  394. if len(label) < 2:
  395. for G in self._graphs:
  396. for ed in G.edges():
  397. del G.edges[ed][name]
  398. self._edge_labels = labels
  399. labels = []
  400. for name in self._node_attrs:
  401. label = set()
  402. for G in self._graphs:
  403. label = label | set(nx.get_node_attributes(G, name).values())
  404. if len(label) > 1:
  405. labels.append(name)
  406. break
  407. if len(label) < 2:
  408. for G in self._graphs:
  409. for nd in G.nodes():
  410. del G.nodes[nd][name]
  411. self._node_attrs = labels
  412. labels = []
  413. for name in self._edge_attrs:
  414. label = set()
  415. for G in self._graphs:
  416. label = label | set(nx.get_edge_attributes(G, name).values())
  417. if len(label) > 1:
  418. labels.append(name)
  419. break
  420. if len(label) < 2:
  421. for G in self._graphs:
  422. for ed in G.edges():
  423. del G.edges[ed][name]
  424. self._edge_attrs = labels
  425. def cut_graphs(self, range_):
  426. self._graphs = [self._graphs[i] for i in range_]
  427. if self._targets is not None:
  428. self._targets = [self._targets[i] for i in range_]
  429. self.clean_labels()
  430. def trim_dataset(self, edge_required=False):
  431. if edge_required:
  432. trimed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if (nx.number_of_nodes(g) != 0 and nx.number_of_edges(g) != 0)]
  433. else:
  434. trimed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if nx.number_of_nodes(g) != 0]
  435. idx = [p[0] for p in trimed_pairs]
  436. self._graphs = [p[1] for p in trimed_pairs]
  437. self._targets = [self._targets[i] for i in idx]
  438. self.clean_labels()
  439. def copy(self):
  440. dataset = Dataset()
  441. graphs = [g.copy() for g in self._graphs] if self._graphs is not None else None
  442. target = self._targets.copy() if self._targets is not None else None
  443. node_labels = self._node_labels.copy() if self._node_labels is not None else None
  444. node_attrs = self._node_attrs.copy() if self._node_attrs is not None else None
  445. edge_labels = self._edge_labels.copy() if self._edge_labels is not None else None
  446. edge_attrs = self._edge_attrs.copy() if self._edge_attrs is not None else None
  447. dataset.load_graphs(graphs, target)
  448. dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  449. # @todo: clean_labels and add other class members?
  450. return dataset
  451. def is_special_dataset(self, inputs):
  452. if inputs.endswith('_unlabeled'):
  453. return True
  454. if inputs == 'MAO_lite':
  455. return True
  456. if inputs == 'Monoterpens':
  457. return True
  458. return False
  459. def load_special_dataset(self, inputs, root, clean_labels, reload, verbose):
  460. if inputs.endswith('_unlabeled'):
  461. self.load_predefined_dataset(inputs[:len(inputs) - 10], root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  462. self.remove_labels(node_labels=self._node_labels,
  463. edge_labels=self._edge_labels,
  464. node_attrs=self._node_attrs,
  465. edge_attrs=self._edge_attrs)
  466. elif inputs == 'MAO_lite':
  467. self.load_predefined_dataset(inputs[:len(inputs) - 5], root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  468. self.remove_labels(edge_labels=['bond_stereo'], node_attrs=['x', 'y'])
  469. elif inputs == 'Monoterpens':
  470. self.load_predefined_dataset('Monoterpenoides', root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  471. def get_all_node_labels(self):
  472. node_labels = []
  473. for g in self._graphs:
  474. for n in g.nodes():
  475. nl = tuple(g.nodes[n].items())
  476. if nl not in node_labels:
  477. node_labels.append(nl)
  478. return node_labels
  479. def get_all_edge_labels(self):
  480. edge_labels = []
  481. for g in self._graphs:
  482. for e in g.edges():
  483. el = tuple(g.edges[e].items())
  484. if el not in edge_labels:
  485. edge_labels.append(el)
  486. return edge_labels
  487. def _get_dataset_size(self):
  488. return len(self._graphs)
  489. def _get_all_node_nums(self):
  490. return [nx.number_of_nodes(G) for G in self._graphs]
  491. def _get_total_node_nums(self, all_node_nums):
  492. return np.sum(all_node_nums)
  493. def _get_ave_node_num(self, all_node_nums):
  494. return np.mean(all_node_nums)
  495. def _get_min_node_num(self, all_node_nums):
  496. return np.amin(all_node_nums)
  497. def _get_max_node_num(self, all_node_nums):
  498. return np.amax(all_node_nums)
  499. def _get_all_edge_nums(self):
  500. return [nx.number_of_edges(G) for G in self._graphs]
  501. def _get_total_edge_nums(self, all_edge_nums):
  502. return np.sum(all_edge_nums)
  503. def _get_ave_edge_num(self, all_edge_nums):
  504. return np.mean(all_edge_nums)
  505. def _get_min_edge_num(self, all_edge_nums):
  506. return np.amin(all_edge_nums)
  507. def _get_max_edge_num(self, all_edge_nums):
  508. return np.amax(all_edge_nums)
  509. def _get_node_label_dim(self):
  510. return len(self._node_labels)
  511. def _get_node_label_num(self, node_label):
  512. nl = set()
  513. for G in self._graphs:
  514. nl = nl | set(nx.get_node_attributes(G, node_label).values())
  515. return len(nl)
  516. def _get_edge_label_dim(self):
  517. return len(self._edge_labels)
  518. def _get_edge_label_num(self, edge_label):
  519. el = set()
  520. for G in self._graphs:
  521. el = el | set(nx.get_edge_attributes(G, edge_label).values())
  522. return len(el)
  523. def _is_directed(self):
  524. return nx.is_directed(self._graphs[0])
  525. def _get_all_node_degrees(self):
  526. return [np.mean(list(dict(G.degree()).values())) for G in self._graphs]
  527. def _get_ave_node_degree(self, all_node_degrees):
  528. return np.mean(all_node_degrees)
  529. def _get_max_node_degree(self, all_node_degrees):
  530. return np.amax(all_node_degrees)
  531. def _get_min_node_degree(self, all_node_degrees):
  532. return np.amin(all_node_degrees)
  533. def _get_all_fill_factors(self):
  534. """Get fill factor, the number of non-zero entries in the adjacency matrix.
  535. Returns
  536. -------
  537. list[float]
  538. List of fill factors for all graphs.
  539. """
  540. return [nx.number_of_edges(G) / (nx.number_of_nodes(G) ** 2) for G in self._graphs]
  541. def _get_ave_fill_factor(self, all_fill_factors):
  542. return np.mean(all_fill_factors)
  543. def _get_max_fill_factor(self, all_fill_factors):
  544. return np.amax(all_fill_factors)
  545. def _get_min_fill_factor(self, all_fill_factors):
  546. return np.amin(all_fill_factors)
  547. def _get_substructures(self):
  548. subs = set()
  549. for G in self._graphs:
  550. degrees = list(dict(G.degree()).values())
  551. if any(i == 2 for i in degrees):
  552. subs.add('linear')
  553. if np.amax(degrees) >= 3:
  554. subs.add('non linear')
  555. if 'linear' in subs and 'non linear' in subs:
  556. break
  557. if self._directed:
  558. for G in self._graphs:
  559. if len(list(nx.find_cycle(G))) > 0:
  560. subs.add('cyclic')
  561. break
  562. # else:
  563. # # @todo: this method does not work for big graph with large amount of edges like D&D, try a better way.
  564. # upper = np.amin([nx.number_of_edges(G) for G in Gn]) * 2 + 10
  565. # for G in Gn:
  566. # if (nx.number_of_edges(G) < upper):
  567. # cyc = list(nx.simple_cycles(G.to_directed()))
  568. # if any(len(i) > 2 for i in cyc):
  569. # subs.add('cyclic')
  570. # break
  571. # if 'cyclic' not in subs:
  572. # for G in Gn:
  573. # cyc = list(nx.simple_cycles(G.to_directed()))
  574. # if any(len(i) > 2 for i in cyc):
  575. # subs.add('cyclic')
  576. # break
  577. return subs
  578. def _get_class_num(self):
  579. return len(set(self._targets))
  580. def _get_node_attr_dim(self):
  581. return len(self._node_attrs)
  582. def _get_edge_attr_dim(self):
  583. return len(self._edge_attrs)
  584. def _compute_all_degree_entropy(self, base=None):
  585. """Compute the entropy of degree distribution of each graph.
  586. Parameters
  587. ----------
  588. base : float, optional
  589. The logarithmic base to use. The default is ``e`` (natural logarithm).
  590. Returns
  591. -------
  592. degree_entropy : float
  593. The calculated entropy.
  594. """
  595. from gklearn.utils.stats import entropy
  596. degree_entropy = []
  597. for g in self._graphs:
  598. degrees = list(dict(g.degree()).values())
  599. en = entropy(degrees, base=base)
  600. degree_entropy.append(en)
  601. return degree_entropy
  602. @property
  603. def graphs(self):
  604. return self._graphs
  605. @property
  606. def targets(self):
  607. return self._targets
  608. @property
  609. def node_labels(self):
  610. return self._node_labels
  611. @property
  612. def edge_labels(self):
  613. return self._edge_labels
  614. @property
  615. def node_attrs(self):
  616. return self._node_attrs
  617. @property
  618. def edge_attrs(self):
  619. return self._edge_attrs
  620. def split_dataset_by_target(dataset):
  621. from gklearn.preimage.utils import get_same_item_indices
  622. graphs = dataset.graphs
  623. targets = dataset.targets
  624. datasets = []
  625. idx_targets = get_same_item_indices(targets)
  626. for key, val in idx_targets.items():
  627. sub_graphs = [graphs[i] for i in val]
  628. sub_dataset = Dataset()
  629. sub_dataset.load_graphs(sub_graphs, [key] * len(val))
  630. node_labels = dataset.node_labels.copy() if dataset.node_labels is not None else None
  631. node_attrs = dataset.node_attrs.copy() if dataset.node_attrs is not None else None
  632. edge_labels = dataset.edge_labels.copy() if dataset.edge_labels is not None else None
  633. edge_attrs = dataset.edge_attrs.copy() if dataset.edge_attrs is not None else None
  634. sub_dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  635. datasets.append(sub_dataset)
  636. # @todo: clean_labels?
  637. return datasets

A Python package for graph kernels, graph edit distances and the graph pre-image problem.