You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

dataset.py 26 kB

4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841
  1. #!/usr/bin/env python3
  2. # -*- coding: utf-8 -*-
  3. """
  4. Created on Thu Mar 26 18:48:27 2020
  5. @author: ljia
  6. """
  7. import numpy as np
  8. import networkx as nx
  9. import os
  10. from gklearn.dataset import DATASET_META, DataFetcher, DataLoader
  11. class Dataset(object):
  12. def __init__(self, inputs=None, root='datasets', filename_targets=None, targets=None, mode='networkx', remove_null_graphs=True, clean_labels=True, reload=False, verbose=False, **kwargs):
  13. self._substructures = None
  14. self._node_label_dim = None
  15. self._edge_label_dim = None
  16. self._directed = None
  17. self._dataset_size = None
  18. self._total_node_num = None
  19. self._ave_node_num = None
  20. self._min_node_num = None
  21. self._max_node_num = None
  22. self._total_edge_num = None
  23. self._ave_edge_num = None
  24. self._min_edge_num = None
  25. self._max_edge_num = None
  26. self._ave_node_degree = None
  27. self._min_node_degree = None
  28. self._max_node_degree = None
  29. self._ave_fill_factor = None
  30. self._min_fill_factor = None
  31. self._max_fill_factor = None
  32. self._node_label_nums = None
  33. self._edge_label_nums = None
  34. self._node_attr_dim = None
  35. self._edge_attr_dim = None
  36. self._class_number = None
  37. self._ds_name = None
  38. if inputs is None:
  39. self._graphs = None
  40. self._targets = None
  41. self._node_labels = None
  42. self._edge_labels = None
  43. self._node_attrs = None
  44. self._edge_attrs = None
  45. # If inputs is a list of graphs.
  46. elif isinstance(inputs, list):
  47. node_labels = kwargs.get('node_labels', None)
  48. node_attrs = kwargs.get('node_attrs', None)
  49. edge_labels = kwargs.get('edge_labels', None)
  50. edge_attrs = kwargs.get('edge_attrs', None)
  51. self.load_graphs(inputs, targets=targets)
  52. self.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  53. if clean_labels:
  54. self.clean_labels()
  55. elif isinstance(inputs, str):
  56. # If inputs is predefined dataset name.
  57. if inputs in DATASET_META:
  58. self.load_predefined_dataset(inputs, root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  59. self._ds_name = inputs
  60. # If the dataset is specially defined, i.g., Alkane_unlabeled, MAO_lite.
  61. elif self.is_special_dataset(inputs):
  62. self.load_special_dataset(inputs, root, clean_labels, reload, verbose)
  63. self._ds_name = inputs
  64. # If inputs is a file name.
  65. elif os.path.isfile(inputs):
  66. self.load_dataset(inputs, filename_targets=filename_targets, clean_labels=clean_labels, **kwargs)
  67. # If inputs is a file name.
  68. else:
  69. raise ValueError('The "inputs" argument "' + inputs + '" is not a valid dataset name or file name.')
  70. else:
  71. raise TypeError('The "inputs" argument cannot be recognized. "Inputs" can be a list of graphs, a predefined dataset name, or a file name of a dataset.')
  72. if remove_null_graphs:
  73. self.trim_dataset(edge_required=False)
  74. def load_dataset(self, filename, filename_targets=None, clean_labels=True, **kwargs):
  75. self._graphs, self._targets, label_names = DataLoader(filename, filename_targets=filename_targets, **kwargs).data
  76. self._node_labels = label_names['node_labels']
  77. self._node_attrs = label_names['node_attrs']
  78. self._edge_labels = label_names['edge_labels']
  79. self._edge_attrs = label_names['edge_attrs']
  80. if clean_labels:
  81. self.clean_labels()
  82. def load_graphs(self, graphs, targets=None):
  83. # this has to be followed by set_labels().
  84. self._graphs = graphs
  85. self._targets = targets
  86. # self.set_labels_attrs() # @todo
  87. def load_predefined_dataset(self, ds_name, root='datasets', clean_labels=True, reload=False, verbose=False):
  88. path = DataFetcher(name=ds_name, root=root, reload=reload, verbose=verbose).path
  89. if DATASET_META[ds_name]['database'] == 'tudataset':
  90. ds_file = os.path.join(path, ds_name + '_A.txt')
  91. fn_targets = None
  92. else:
  93. load_files = DATASET_META[ds_name]['load_files']
  94. if isinstance(load_files[0], str):
  95. ds_file = os.path.join(path, load_files[0])
  96. else: # load_files[0] is a list of files.
  97. ds_file = [os.path.join(path, fn) for fn in load_files[0]]
  98. fn_targets = os.path.join(path, load_files[1]) if len(load_files) == 2 else None
  99. if 'extra_params' in DATASET_META[ds_name]:
  100. kwargs = DATASET_META[ds_name]['extra_params']
  101. else:
  102. kwargs = {}
  103. self._graphs, self._targets, label_names = DataLoader(ds_file, filename_targets=fn_targets, **kwargs).data
  104. self._node_labels = label_names['node_labels']
  105. self._node_attrs = label_names['node_attrs']
  106. self._edge_labels = label_names['edge_labels']
  107. self._edge_attrs = label_names['edge_attrs']
  108. if clean_labels:
  109. self.clean_labels()
  110. # Deal with specific datasets.
  111. if ds_name == 'Alkane':
  112. self.trim_dataset(edge_required=True)
  113. self.remove_labels(node_labels=['atom_symbol'])
  114. def set_labels(self, node_labels=[], node_attrs=[], edge_labels=[], edge_attrs=[]):
  115. self._node_labels = node_labels
  116. self._node_attrs = node_attrs
  117. self._edge_labels = edge_labels
  118. self._edge_attrs = edge_attrs
  119. def set_labels_attrs(self, node_labels=None, node_attrs=None, edge_labels=None, edge_attrs=None):
  120. # @todo: remove labels which have only one possible values.
  121. if node_labels is None:
  122. self._node_labels = self._graphs[0].graph['node_labels']
  123. # # graphs are considered node unlabeled if all nodes have the same label.
  124. # infos.update({'node_labeled': is_nl if node_label_num > 1 else False})
  125. if node_attrs is None:
  126. self._node_attrs = self._graphs[0].graph['node_attrs']
  127. # for G in Gn:
  128. # for n in G.nodes(data=True):
  129. # if 'attributes' in n[1]:
  130. # return len(n[1]['attributes'])
  131. # return 0
  132. if edge_labels is None:
  133. self._edge_labels = self._graphs[0].graph['edge_labels']
  134. # # graphs are considered edge unlabeled if all edges have the same label.
  135. # infos.update({'edge_labeled': is_el if edge_label_num > 1 else False})
  136. if edge_attrs is None:
  137. self._edge_attrs = self._graphs[0].graph['edge_attrs']
  138. # for G in Gn:
  139. # if nx.number_of_edges(G) > 0:
  140. # for e in G.edges(data=True):
  141. # if 'attributes' in e[2]:
  142. # return len(e[2]['attributes'])
  143. # return 0
  144. def get_dataset_infos(self, keys=None, params=None):
  145. """Computes and returns the structure and property information of the graph dataset.
  146. Parameters
  147. ----------
  148. keys : list, optional
  149. A list of strings which indicate which informations will be returned. The
  150. possible choices includes:
  151. 'substructures': sub-structures graphs contains, including 'linear', 'non
  152. linear' and 'cyclic'.
  153. 'node_label_dim': whether vertices have symbolic labels.
  154. 'edge_label_dim': whether egdes have symbolic labels.
  155. 'directed': whether graphs in dataset are directed.
  156. 'dataset_size': number of graphs in dataset.
  157. 'total_node_num': total number of vertices of all graphs in dataset.
  158. 'ave_node_num': average number of vertices of graphs in dataset.
  159. 'min_node_num': minimum number of vertices of graphs in dataset.
  160. 'max_node_num': maximum number of vertices of graphs in dataset.
  161. 'total_edge_num': total number of edges of all graphs in dataset.
  162. 'ave_edge_num': average number of edges of graphs in dataset.
  163. 'min_edge_num': minimum number of edges of graphs in dataset.
  164. 'max_edge_num': maximum number of edges of graphs in dataset.
  165. 'ave_node_degree': average vertex degree of graphs in dataset.
  166. 'min_node_degree': minimum vertex degree of graphs in dataset.
  167. 'max_node_degree': maximum vertex degree of graphs in dataset.
  168. 'ave_fill_factor': average fill factor (number_of_edges /
  169. (number_of_nodes ** 2)) of graphs in dataset.
  170. 'min_fill_factor': minimum fill factor of graphs in dataset.
  171. 'max_fill_factor': maximum fill factor of graphs in dataset.
  172. 'node_label_nums': list of numbers of symbolic vertex labels of graphs in dataset.
  173. 'edge_label_nums': list number of symbolic edge labels of graphs in dataset.
  174. 'node_attr_dim': number of dimensions of non-symbolic vertex labels.
  175. Extracted from the 'attributes' attribute of graph nodes.
  176. 'edge_attr_dim': number of dimensions of non-symbolic edge labels.
  177. Extracted from the 'attributes' attribute of graph edges.
  178. 'class_number': number of classes. Only available for classification problems.
  179. 'all_degree_entropy': the entropy of degree distribution of each graph.
  180. 'ave_degree_entropy': the average entropy of degree distribution of all graphs.
  181. All informations above will be returned if `keys` is not given.
  182. params: dict of dict, optional
  183. A dictinary which contains extra parameters for each possible
  184. element in ``keys``.
  185. Return
  186. ------
  187. dict
  188. Information of the graph dataset keyed by `keys`.
  189. """
  190. infos = {}
  191. if keys == None:
  192. keys = [
  193. 'substructures',
  194. 'node_label_dim',
  195. 'edge_label_dim',
  196. 'directed',
  197. 'dataset_size',
  198. 'total_node_num',
  199. 'ave_node_num',
  200. 'min_node_num',
  201. 'max_node_num',
  202. 'total_edge_num',
  203. 'ave_edge_num',
  204. 'min_edge_num',
  205. 'max_edge_num',
  206. 'ave_node_degree',
  207. 'min_node_degree',
  208. 'max_node_degree',
  209. 'ave_fill_factor',
  210. 'min_fill_factor',
  211. 'max_fill_factor',
  212. 'node_label_nums',
  213. 'edge_label_nums',
  214. 'node_attr_dim',
  215. 'edge_attr_dim',
  216. 'class_number',
  217. 'all_degree_entropy',
  218. 'ave_degree_entropy'
  219. ]
  220. # dataset size
  221. if 'dataset_size' in keys:
  222. if self._dataset_size is None:
  223. self._dataset_size = self._get_dataset_size()
  224. infos['dataset_size'] = self._dataset_size
  225. # graph node number
  226. if any(i in keys for i in ['total_node_num', 'ave_node_num', 'min_node_num', 'max_node_num']):
  227. all_node_nums = self._get_all_node_nums()
  228. if 'total_node_num' in keys:
  229. if self._total_node_num is None:
  230. self._total_node_num = self._get_total_node_num(all_node_nums)
  231. infos['total_node_num'] = self._total_node_num
  232. if 'ave_node_num' in keys:
  233. if self._ave_node_num is None:
  234. self._ave_node_num = self._get_ave_node_num(all_node_nums)
  235. infos['ave_node_num'] = self._ave_node_num
  236. if 'min_node_num' in keys:
  237. if self._min_node_num is None:
  238. self._min_node_num = self._get_min_node_num(all_node_nums)
  239. infos['min_node_num'] = self._min_node_num
  240. if 'max_node_num' in keys:
  241. if self._max_node_num is None:
  242. self._max_node_num = self._get_max_node_num(all_node_nums)
  243. infos['max_node_num'] = self._max_node_num
  244. # graph edge number
  245. if any(i in keys for i in ['total_edge_num', 'ave_edge_num', 'min_edge_num', 'max_edge_num']):
  246. all_edge_nums = self._get_all_edge_nums()
  247. if 'total_edge_num' in keys:
  248. if self._total_edge_num is None:
  249. self._total_edge_num = self._get_total_edge_num(all_edge_nums)
  250. infos['total_edge_num'] = self._total_edge_num
  251. if 'ave_edge_num' in keys:
  252. if self._ave_edge_num is None:
  253. self._ave_edge_num = self._get_ave_edge_num(all_edge_nums)
  254. infos['ave_edge_num'] = self._ave_edge_num
  255. if 'max_edge_num' in keys:
  256. if self._max_edge_num is None:
  257. self._max_edge_num = self._get_max_edge_num(all_edge_nums)
  258. infos['max_edge_num'] = self._max_edge_num
  259. if 'min_edge_num' in keys:
  260. if self._min_edge_num is None:
  261. self._min_edge_num = self._get_min_edge_num(all_edge_nums)
  262. infos['min_edge_num'] = self._min_edge_num
  263. # label number
  264. if 'node_label_dim' in keys:
  265. if self._node_label_dim is None:
  266. self._node_label_dim = self._get_node_label_dim()
  267. infos['node_label_dim'] = self._node_label_dim
  268. if 'node_label_nums' in keys:
  269. if self._node_label_nums is None:
  270. self._node_label_nums = {}
  271. for node_label in self._node_labels:
  272. self._node_label_nums[node_label] = self._get_node_label_num(node_label)
  273. infos['node_label_nums'] = self._node_label_nums
  274. if 'edge_label_dim' in keys:
  275. if self._edge_label_dim is None:
  276. self._edge_label_dim = self._get_edge_label_dim()
  277. infos['edge_label_dim'] = self._edge_label_dim
  278. if 'edge_label_nums' in keys:
  279. if self._edge_label_nums is None:
  280. self._edge_label_nums = {}
  281. for edge_label in self._edge_labels:
  282. self._edge_label_nums[edge_label] = self._get_edge_label_num(edge_label)
  283. infos['edge_label_nums'] = self._edge_label_nums
  284. if 'directed' in keys or 'substructures' in keys:
  285. if self._directed is None:
  286. self._directed = self._is_directed()
  287. infos['directed'] = self._directed
  288. # node degree
  289. if any(i in keys for i in ['ave_node_degree', 'max_node_degree', 'min_node_degree']):
  290. all_node_degrees = self._get_all_node_degrees()
  291. if 'ave_node_degree' in keys:
  292. if self._ave_node_degree is None:
  293. self._ave_node_degree = self._get_ave_node_degree(all_node_degrees)
  294. infos['ave_node_degree'] = self._ave_node_degree
  295. if 'max_node_degree' in keys:
  296. if self._max_node_degree is None:
  297. self._max_node_degree = self._get_max_node_degree(all_node_degrees)
  298. infos['max_node_degree'] = self._max_node_degree
  299. if 'min_node_degree' in keys:
  300. if self._min_node_degree is None:
  301. self._min_node_degree = self._get_min_node_degree(all_node_degrees)
  302. infos['min_node_degree'] = self._min_node_degree
  303. # fill factor
  304. if any(i in keys for i in ['ave_fill_factor', 'max_fill_factor', 'min_fill_factor']):
  305. all_fill_factors = self._get_all_fill_factors()
  306. if 'ave_fill_factor' in keys:
  307. if self._ave_fill_factor is None:
  308. self._ave_fill_factor = self._get_ave_fill_factor(all_fill_factors)
  309. infos['ave_fill_factor'] = self._ave_fill_factor
  310. if 'max_fill_factor' in keys:
  311. if self._max_fill_factor is None:
  312. self._max_fill_factor = self._get_max_fill_factor(all_fill_factors)
  313. infos['max_fill_factor'] = self._max_fill_factor
  314. if 'min_fill_factor' in keys:
  315. if self._min_fill_factor is None:
  316. self._min_fill_factor = self._get_min_fill_factor(all_fill_factors)
  317. infos['min_fill_factor'] = self._min_fill_factor
  318. if 'substructures' in keys:
  319. if self._substructures is None:
  320. self._substructures = self._get_substructures()
  321. infos['substructures'] = self._substructures
  322. if 'class_number' in keys:
  323. if self._class_number is None:
  324. self._class_number = self._get_class_number()
  325. infos['class_number'] = self._class_number
  326. if 'node_attr_dim' in keys:
  327. if self._node_attr_dim is None:
  328. self._node_attr_dim = self._get_node_attr_dim()
  329. infos['node_attr_dim'] = self._node_attr_dim
  330. if 'edge_attr_dim' in keys:
  331. if self._edge_attr_dim is None:
  332. self._edge_attr_dim = self._get_edge_attr_dim()
  333. infos['edge_attr_dim'] = self._edge_attr_dim
  334. # entropy of degree distribution.
  335. if 'all_degree_entropy' in keys:
  336. if params is not None and ('all_degree_entropy' in params) and ('base' in params['all_degree_entropy']):
  337. base = params['all_degree_entropy']['base']
  338. else:
  339. base = None
  340. infos['all_degree_entropy'] = self._compute_all_degree_entropy(base=base)
  341. if 'ave_degree_entropy' in keys:
  342. if params is not None and ('ave_degree_entropy' in params) and ('base' in params['ave_degree_entropy']):
  343. base = params['ave_degree_entropy']['base']
  344. else:
  345. base = None
  346. infos['ave_degree_entropy'] = np.mean(self._compute_all_degree_entropy(base=base))
  347. return infos
  348. def print_graph_infos(self, infos):
  349. from collections import OrderedDict
  350. keys = list(infos.keys())
  351. print(OrderedDict(sorted(infos.items(), key=lambda i: keys.index(i[0]))))
  352. def remove_labels(self, node_labels=[], edge_labels=[], node_attrs=[], edge_attrs=[]):
  353. node_labels = [item for item in node_labels if item in self._node_labels]
  354. edge_labels = [item for item in edge_labels if item in self._edge_labels]
  355. node_attrs = [item for item in node_attrs if item in self._node_attrs]
  356. edge_attrs = [item for item in edge_attrs if item in self._edge_attrs]
  357. for g in self._graphs:
  358. for nd in g.nodes():
  359. for nl in node_labels:
  360. del g.nodes[nd][nl]
  361. for na in node_attrs:
  362. del g.nodes[nd][na]
  363. for ed in g.edges():
  364. for el in edge_labels:
  365. del g.edges[ed][el]
  366. for ea in edge_attrs:
  367. del g.edges[ed][ea]
  368. if len(node_labels) > 0:
  369. self._node_labels = [nl for nl in self._node_labels if nl not in node_labels]
  370. if len(edge_labels) > 0:
  371. self._edge_labels = [el for el in self._edge_labels if el not in edge_labels]
  372. if len(node_attrs) > 0:
  373. self._node_attrs = [na for na in self._node_attrs if na not in node_attrs]
  374. if len(edge_attrs) > 0:
  375. self._edge_attrs = [ea for ea in self._edge_attrs if ea not in edge_attrs]
  376. def clean_labels(self):
  377. labels = []
  378. for name in self._node_labels:
  379. label = set()
  380. for G in self._graphs:
  381. label = label | set(nx.get_node_attributes(G, name).values())
  382. if len(label) > 1:
  383. labels.append(name)
  384. break
  385. if len(label) < 2:
  386. for G in self._graphs:
  387. for nd in G.nodes():
  388. del G.nodes[nd][name]
  389. self._node_labels = labels
  390. labels = []
  391. for name in self._edge_labels:
  392. label = set()
  393. for G in self._graphs:
  394. label = label | set(nx.get_edge_attributes(G, name).values())
  395. if len(label) > 1:
  396. labels.append(name)
  397. break
  398. if len(label) < 2:
  399. for G in self._graphs:
  400. for ed in G.edges():
  401. del G.edges[ed][name]
  402. self._edge_labels = labels
  403. labels = []
  404. for name in self._node_attrs:
  405. label = set()
  406. for G in self._graphs:
  407. label = label | set(nx.get_node_attributes(G, name).values())
  408. if len(label) > 1:
  409. labels.append(name)
  410. break
  411. if len(label) < 2:
  412. for G in self._graphs:
  413. for nd in G.nodes():
  414. del G.nodes[nd][name]
  415. self._node_attrs = labels
  416. labels = []
  417. for name in self._edge_attrs:
  418. label = set()
  419. for G in self._graphs:
  420. label = label | set(nx.get_edge_attributes(G, name).values())
  421. if len(label) > 1:
  422. labels.append(name)
  423. break
  424. if len(label) < 2:
  425. for G in self._graphs:
  426. for ed in G.edges():
  427. del G.edges[ed][name]
  428. self._edge_attrs = labels
  429. def cut_graphs(self, range_):
  430. self._graphs = [self._graphs[i] for i in range_]
  431. if self._targets is not None:
  432. self._targets = [self._targets[i] for i in range_]
  433. self.clean_labels()
  434. def trim_dataset(self, edge_required=False):
  435. if edge_required: # @todo: there is a possibility that some node labels will be removed.
  436. trimed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if (nx.number_of_nodes(g) != 0 and nx.number_of_edges(g) != 0)]
  437. else:
  438. trimed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if nx.number_of_nodes(g) != 0]
  439. idx = [p[0] for p in trimed_pairs]
  440. self._graphs = [p[1] for p in trimed_pairs]
  441. self._targets = [self._targets[i] for i in idx]
  442. self.clean_labels()
  443. def copy(self):
  444. dataset = Dataset()
  445. graphs = [g.copy() for g in self._graphs] if self._graphs is not None else None
  446. target = self._targets.copy() if self._targets is not None else None
  447. node_labels = self._node_labels.copy() if self._node_labels is not None else None
  448. node_attrs = self._node_attrs.copy() if self._node_attrs is not None else None
  449. edge_labels = self._edge_labels.copy() if self._edge_labels is not None else None
  450. edge_attrs = self._edge_attrs.copy() if self._edge_attrs is not None else None
  451. dataset.load_graphs(graphs, target)
  452. dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  453. # @todo: clean_labels and add other class members?
  454. return dataset
  455. def is_special_dataset(self, inputs):
  456. if inputs.endswith('_unlabeled'):
  457. return True
  458. if inputs == 'MAO_lite':
  459. return True
  460. if inputs == 'Monoterpens':
  461. return True
  462. return False
  463. def load_special_dataset(self, inputs, root, clean_labels, reload, verbose):
  464. if inputs.endswith('_unlabeled'):
  465. self.load_predefined_dataset(inputs[:len(inputs) - 10], root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  466. self.remove_labels(node_labels=self._node_labels,
  467. edge_labels=self._edge_labels,
  468. node_attrs=self._node_attrs,
  469. edge_attrs=self._edge_attrs)
  470. elif inputs == 'MAO_lite':
  471. self.load_predefined_dataset(inputs[:len(inputs) - 5], root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  472. self.remove_labels(edge_labels=['bond_stereo'], node_attrs=['x', 'y'])
  473. elif inputs == 'Monoterpens':
  474. self.load_predefined_dataset('Monoterpenoides', root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
  475. def get_all_node_labels(self):
  476. node_labels = []
  477. for g in self._graphs:
  478. for n in g.nodes():
  479. nl = tuple(g.nodes[n].items())
  480. if nl not in node_labels:
  481. node_labels.append(nl)
  482. return node_labels
  483. def get_all_edge_labels(self):
  484. edge_labels = []
  485. for g in self._graphs:
  486. for e in g.edges():
  487. el = tuple(g.edges[e].items())
  488. if el not in edge_labels:
  489. edge_labels.append(el)
  490. return edge_labels
  491. def _get_dataset_size(self):
  492. return len(self._graphs)
  493. def _get_all_node_nums(self):
  494. return [nx.number_of_nodes(G) for G in self._graphs]
  495. def _get_total_node_nums(self, all_node_nums):
  496. return np.sum(all_node_nums)
  497. def _get_ave_node_num(self, all_node_nums):
  498. return np.mean(all_node_nums)
  499. def _get_min_node_num(self, all_node_nums):
  500. return np.amin(all_node_nums)
  501. def _get_max_node_num(self, all_node_nums):
  502. return np.amax(all_node_nums)
  503. def _get_all_edge_nums(self):
  504. return [nx.number_of_edges(G) for G in self._graphs]
  505. def _get_total_edge_nums(self, all_edge_nums):
  506. return np.sum(all_edge_nums)
  507. def _get_ave_edge_num(self, all_edge_nums):
  508. return np.mean(all_edge_nums)
  509. def _get_min_edge_num(self, all_edge_nums):
  510. return np.amin(all_edge_nums)
  511. def _get_max_edge_num(self, all_edge_nums):
  512. return np.amax(all_edge_nums)
  513. def _get_node_label_dim(self):
  514. return len(self._node_labels)
  515. def _get_node_label_num(self, node_label):
  516. nl = set()
  517. for G in self._graphs:
  518. nl = nl | set(nx.get_node_attributes(G, node_label).values())
  519. return len(nl)
  520. def _get_edge_label_dim(self):
  521. return len(self._edge_labels)
  522. def _get_edge_label_num(self, edge_label):
  523. el = set()
  524. for G in self._graphs:
  525. el = el | set(nx.get_edge_attributes(G, edge_label).values())
  526. return len(el)
  527. def _is_directed(self):
  528. return nx.is_directed(self._graphs[0])
  529. def _get_all_node_degrees(self):
  530. return [np.mean(list(dict(G.degree()).values())) for G in self._graphs]
  531. def _get_ave_node_degree(self, all_node_degrees):
  532. return np.mean(all_node_degrees)
  533. def _get_max_node_degree(self, all_node_degrees):
  534. return np.amax(all_node_degrees)
  535. def _get_min_node_degree(self, all_node_degrees):
  536. return np.amin(all_node_degrees)
  537. def _get_all_fill_factors(self):
  538. """Get fill factor, the number of non-zero entries in the adjacency matrix.
  539. Returns
  540. -------
  541. list[float]
  542. List of fill factors for all graphs.
  543. """
  544. return [nx.number_of_edges(G) / (nx.number_of_nodes(G) ** 2) for G in self._graphs]
  545. def _get_ave_fill_factor(self, all_fill_factors):
  546. return np.mean(all_fill_factors)
  547. def _get_max_fill_factor(self, all_fill_factors):
  548. return np.amax(all_fill_factors)
  549. def _get_min_fill_factor(self, all_fill_factors):
  550. return np.amin(all_fill_factors)
  551. def _get_substructures(self):
  552. subs = set()
  553. for G in self._graphs:
  554. degrees = list(dict(G.degree()).values())
  555. if any(i == 2 for i in degrees):
  556. subs.add('linear')
  557. if np.amax(degrees) >= 3:
  558. subs.add('non linear')
  559. if 'linear' in subs and 'non linear' in subs:
  560. break
  561. if self._directed:
  562. for G in self._graphs:
  563. if len(list(nx.find_cycle(G))) > 0:
  564. subs.add('cyclic')
  565. break
  566. # else:
  567. # # @todo: this method does not work for big graph with large amount of edges like D&D, try a better way.
  568. # upper = np.amin([nx.number_of_edges(G) for G in Gn]) * 2 + 10
  569. # for G in Gn:
  570. # if (nx.number_of_edges(G) < upper):
  571. # cyc = list(nx.simple_cycles(G.to_directed()))
  572. # if any(len(i) > 2 for i in cyc):
  573. # subs.add('cyclic')
  574. # break
  575. # if 'cyclic' not in subs:
  576. # for G in Gn:
  577. # cyc = list(nx.simple_cycles(G.to_directed()))
  578. # if any(len(i) > 2 for i in cyc):
  579. # subs.add('cyclic')
  580. # break
  581. return subs
  582. def _get_class_num(self):
  583. return len(set(self._targets))
  584. def _get_node_attr_dim(self):
  585. return len(self._node_attrs)
  586. def _get_edge_attr_dim(self):
  587. return len(self._edge_attrs)
  588. def _compute_all_degree_entropy(self, base=None):
  589. """Compute the entropy of degree distribution of each graph.
  590. Parameters
  591. ----------
  592. base : float, optional
  593. The logarithmic base to use. The default is ``e`` (natural logarithm).
  594. Returns
  595. -------
  596. degree_entropy : float
  597. The calculated entropy.
  598. """
  599. from gklearn.utils.stats import entropy
  600. degree_entropy = []
  601. for g in self._graphs:
  602. degrees = list(dict(g.degree()).values())
  603. en = entropy(degrees, base=base)
  604. degree_entropy.append(en)
  605. return degree_entropy
  606. @property
  607. def graphs(self):
  608. return self._graphs
  609. @property
  610. def targets(self):
  611. return self._targets
  612. @property
  613. def node_labels(self):
  614. return self._node_labels
  615. @property
  616. def edge_labels(self):
  617. return self._edge_labels
  618. @property
  619. def node_attrs(self):
  620. return self._node_attrs
  621. @property
  622. def edge_attrs(self):
  623. return self._edge_attrs
  624. def split_dataset_by_target(dataset):
  625. from gklearn.preimage.utils import get_same_item_indices
  626. graphs = dataset.graphs
  627. targets = dataset.targets
  628. datasets = []
  629. idx_targets = get_same_item_indices(targets)
  630. for key, val in idx_targets.items():
  631. sub_graphs = [graphs[i] for i in val]
  632. sub_dataset = Dataset()
  633. sub_dataset.load_graphs(sub_graphs, [key] * len(val))
  634. node_labels = dataset.node_labels.copy() if dataset.node_labels is not None else None
  635. node_attrs = dataset.node_attrs.copy() if dataset.node_attrs is not None else None
  636. edge_labels = dataset.edge_labels.copy() if dataset.edge_labels is not None else None
  637. edge_attrs = dataset.edge_attrs.copy() if dataset.edge_attrs is not None else None
  638. sub_dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
  639. datasets.append(sub_dataset)
  640. # @todo: clean_labels?
  641. return datasets

A Python package for graph kernels, graph edit distances and graph pre-image problem.