
dataset.py 25 kB

4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
4 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 26 18:48:27 2020
@author: ljia
"""
import numpy as np
import networkx as nx
import os

from gklearn.dataset import DATASET_META, DataFetcher, DataLoader


class Dataset(object):

    def __init__(self, inputs=None, root='datasets', filename_targets=None, targets=None, mode='networkx', clean_labels=True, reload=False, verbose=False, **kwargs):
        self._substructures = None
        self._node_label_dim = None
        self._edge_label_dim = None
        self._directed = None
        self._dataset_size = None
        self._total_node_num = None
        self._ave_node_num = None
        self._min_node_num = None
        self._max_node_num = None
        self._total_edge_num = None
        self._ave_edge_num = None
        self._min_edge_num = None
        self._max_edge_num = None
        self._ave_node_degree = None
        self._min_node_degree = None
        self._max_node_degree = None
        self._ave_fill_factor = None
        self._min_fill_factor = None
        self._max_fill_factor = None
        self._node_label_nums = None
        self._edge_label_nums = None
        self._node_attr_dim = None
        self._edge_attr_dim = None
        self._class_number = None
        self._ds_name = None

        if inputs is None:
            self._graphs = None
            self._targets = None
            self._node_labels = None
            self._edge_labels = None
            self._node_attrs = None
            self._edge_attrs = None

        # If inputs is a list of graphs.
        elif isinstance(inputs, list):
            node_labels = kwargs.get('node_labels', None)
            node_attrs = kwargs.get('node_attrs', None)
            edge_labels = kwargs.get('edge_labels', None)
            edge_attrs = kwargs.get('edge_attrs', None)
            self.load_graphs(inputs, targets=targets)
            self.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
            if clean_labels:
                self.clean_labels()

        elif isinstance(inputs, str):
            # If inputs is a predefined dataset name.
            if inputs in DATASET_META:
                self.load_predefined_dataset(inputs, root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
                self._ds_name = inputs

            elif inputs.endswith('_unlabeled'):
                self.load_predefined_dataset(inputs[:len(inputs) - 10], root=root, clean_labels=clean_labels, reload=reload, verbose=verbose)
                self._ds_name = inputs
                # Deal with special suffixes.
                self.check_special_suffices()

            # If inputs is a file name.
            elif os.path.isfile(inputs):
                self.load_dataset(inputs, filename_targets=filename_targets, clean_labels=clean_labels, **kwargs)

            # Otherwise the string is neither a known dataset name nor an existing file.
            else:
                raise ValueError('The "inputs" argument "' + inputs + '" is not a valid dataset name or file name.')

        else:
            raise TypeError('The "inputs" argument cannot be recognized. "Inputs" can be a list of graphs, a predefined dataset name, or a file name of a dataset.')
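
    # Usage sketch for the three accepted kinds of `inputs` (assumptions:
    # 'MUTAG' is one of the predefined names in DATASET_META; `my_graphs`,
    # `my_targets` and 'my_dataset.ds' are hypothetical user-provided objects
    # and files):
    #
    #   ds1 = Dataset('MUTAG', root='datasets')        # predefined dataset name
    #   ds2 = Dataset(my_graphs, targets=my_targets,
    #                 node_labels=['atom_symbol'])      # list of networkx graphs
    #   ds3 = Dataset('my_dataset.ds')                  # path to a dataset file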

    def load_dataset(self, filename, filename_targets=None, clean_labels=True, **kwargs):
        self._graphs, self._targets, label_names = DataLoader(filename, filename_targets=filename_targets, **kwargs).data
        self._node_labels = label_names['node_labels']
        self._node_attrs = label_names['node_attrs']
        self._edge_labels = label_names['edge_labels']
        self._edge_attrs = label_names['edge_attrs']
        if clean_labels:
            self.clean_labels()

    def load_graphs(self, graphs, targets=None):
        # This has to be followed by set_labels().
        self._graphs = graphs
        self._targets = targets
        # self.set_labels_attrs() # @todo

    def load_predefined_dataset(self, ds_name, root='datasets', clean_labels=True, reload=False, verbose=False):
        path = DataFetcher(name=ds_name, root=root, reload=reload, verbose=verbose).path

        if DATASET_META[ds_name]['database'] == 'tudataset':
            ds_file = os.path.join(path, ds_name + '_A.txt')
            fn_targets = None
        else:
            load_files = DATASET_META[ds_name]['load_files']
            if isinstance(load_files[0], str):
                ds_file = os.path.join(path, load_files[0])
            else:  # load_files[0] is a list of files.
                ds_file = [os.path.join(path, fn) for fn in load_files[0]]
            fn_targets = os.path.join(path, load_files[1]) if len(load_files) == 2 else None

        self._graphs, self._targets, label_names = DataLoader(ds_file, filename_targets=fn_targets).data
        self._node_labels = label_names['node_labels']
        self._node_attrs = label_names['node_attrs']
        self._edge_labels = label_names['edge_labels']
        self._edge_attrs = label_names['edge_attrs']
        if clean_labels:
            self.clean_labels()

        # Deal with specific datasets.
        if ds_name == 'Alkane':
            self.trim_dataset(edge_required=True)
            self.remove_labels(node_labels=['atom_symbol'])

    def set_labels(self, node_labels=[], node_attrs=[], edge_labels=[], edge_attrs=[]):
        self._node_labels = node_labels
        self._node_attrs = node_attrs
        self._edge_labels = edge_labels
        self._edge_attrs = edge_attrs

    def set_labels_attrs(self, node_labels=None, node_attrs=None, edge_labels=None, edge_attrs=None):
        # @todo: remove labels which have only one possible value.
        if node_labels is None:
            self._node_labels = self._graphs[0].graph['node_labels']
            # # Graphs are considered node-unlabeled if all nodes have the same label.
            # infos.update({'node_labeled': is_nl if node_label_num > 1 else False})
        if node_attrs is None:
            self._node_attrs = self._graphs[0].graph['node_attrs']
            # for G in Gn:
            #     for n in G.nodes(data=True):
            #         if 'attributes' in n[1]:
            #             return len(n[1]['attributes'])
            # return 0
        if edge_labels is None:
            self._edge_labels = self._graphs[0].graph['edge_labels']
            # # Graphs are considered edge-unlabeled if all edges have the same label.
            # infos.update({'edge_labeled': is_el if edge_label_num > 1 else False})
        if edge_attrs is None:
            self._edge_attrs = self._graphs[0].graph['edge_attrs']
            # for G in Gn:
            #     if nx.number_of_edges(G) > 0:
            #         for e in G.edges(data=True):
            #             if 'attributes' in e[2]:
            #                 return len(e[2]['attributes'])
            # return 0

    def get_dataset_infos(self, keys=None, params=None):
        """Compute and return structural and property information of the graph dataset.

        Parameters
        ----------
        keys : list, optional
            A list of strings indicating which pieces of information to return.
            The possible choices include:

            'substructures': sub-structures the graphs contain, including
                'linear', 'non linear' and 'cyclic'.
            'node_label_dim': number of symbolic vertex labels.
            'edge_label_dim': number of symbolic edge labels.
            'directed': whether the graphs in the dataset are directed.
            'dataset_size': number of graphs in the dataset.
            'total_node_num': total number of vertices of all graphs in the dataset.
            'ave_node_num': average number of vertices of graphs in the dataset.
            'min_node_num': minimum number of vertices of graphs in the dataset.
            'max_node_num': maximum number of vertices of graphs in the dataset.
            'total_edge_num': total number of edges of all graphs in the dataset.
            'ave_edge_num': average number of edges of graphs in the dataset.
            'min_edge_num': minimum number of edges of graphs in the dataset.
            'max_edge_num': maximum number of edges of graphs in the dataset.
            'ave_node_degree': average vertex degree of graphs in the dataset.
            'min_node_degree': minimum vertex degree of graphs in the dataset.
            'max_node_degree': maximum vertex degree of graphs in the dataset.
            'ave_fill_factor': average fill factor (number_of_edges /
                (number_of_nodes ** 2)) of graphs in the dataset.
            'min_fill_factor': minimum fill factor of graphs in the dataset.
            'max_fill_factor': maximum fill factor of graphs in the dataset.
            'node_label_nums': number of distinct values of each symbolic vertex
                label, keyed by label name.
            'edge_label_nums': number of distinct values of each symbolic edge
                label, keyed by label name.
            'node_attr_dim': number of dimensions of non-symbolic vertex labels,
                extracted from the 'attributes' attribute of graph nodes.
            'edge_attr_dim': number of dimensions of non-symbolic edge labels,
                extracted from the 'attributes' attribute of graph edges.
            'class_number': number of classes. Only available for classification
                problems.
            'all_degree_entropy': the entropy of the degree distribution of each
                graph.
            'ave_degree_entropy': the average entropy of the degree distributions
                of all graphs.

            All of the information above is returned if `keys` is not given.

        params : dict of dict, optional
            A dictionary which contains extra parameters for each possible
            element in ``keys``.

        Returns
        -------
        dict
            Information of the graph dataset, keyed by `keys`.
        """
        infos = {}

        if keys is None:
            keys = [
                'substructures',
                'node_label_dim',
                'edge_label_dim',
                'directed',
                'dataset_size',
                'total_node_num',
                'ave_node_num',
                'min_node_num',
                'max_node_num',
                'total_edge_num',
                'ave_edge_num',
                'min_edge_num',
                'max_edge_num',
                'ave_node_degree',
                'min_node_degree',
                'max_node_degree',
                'ave_fill_factor',
                'min_fill_factor',
                'max_fill_factor',
                'node_label_nums',
                'edge_label_nums',
                'node_attr_dim',
                'edge_attr_dim',
                'class_number',
                'all_degree_entropy',
                'ave_degree_entropy'
            ]

        # dataset size
        if 'dataset_size' in keys:
            if self._dataset_size is None:
                self._dataset_size = self._get_dataset_size()
            infos['dataset_size'] = self._dataset_size

        # graph node number
        if any(i in keys for i in ['total_node_num', 'ave_node_num', 'min_node_num', 'max_node_num']):
            all_node_nums = self._get_all_node_nums()
        if 'total_node_num' in keys:
            if self._total_node_num is None:
                self._total_node_num = self._get_total_node_num(all_node_nums)
            infos['total_node_num'] = self._total_node_num
        if 'ave_node_num' in keys:
            if self._ave_node_num is None:
                self._ave_node_num = self._get_ave_node_num(all_node_nums)
            infos['ave_node_num'] = self._ave_node_num
        if 'min_node_num' in keys:
            if self._min_node_num is None:
                self._min_node_num = self._get_min_node_num(all_node_nums)
            infos['min_node_num'] = self._min_node_num
        if 'max_node_num' in keys:
            if self._max_node_num is None:
                self._max_node_num = self._get_max_node_num(all_node_nums)
            infos['max_node_num'] = self._max_node_num

        # graph edge number
        if any(i in keys for i in ['total_edge_num', 'ave_edge_num', 'min_edge_num', 'max_edge_num']):
            all_edge_nums = self._get_all_edge_nums()
        if 'total_edge_num' in keys:
            if self._total_edge_num is None:
                self._total_edge_num = self._get_total_edge_num(all_edge_nums)
            infos['total_edge_num'] = self._total_edge_num
        if 'ave_edge_num' in keys:
            if self._ave_edge_num is None:
                self._ave_edge_num = self._get_ave_edge_num(all_edge_nums)
            infos['ave_edge_num'] = self._ave_edge_num
        if 'max_edge_num' in keys:
            if self._max_edge_num is None:
                self._max_edge_num = self._get_max_edge_num(all_edge_nums)
            infos['max_edge_num'] = self._max_edge_num
        if 'min_edge_num' in keys:
            if self._min_edge_num is None:
                self._min_edge_num = self._get_min_edge_num(all_edge_nums)
            infos['min_edge_num'] = self._min_edge_num

        # label number
        if 'node_label_dim' in keys:
            if self._node_label_dim is None:
                self._node_label_dim = self._get_node_label_dim()
            infos['node_label_dim'] = self._node_label_dim
        if 'node_label_nums' in keys:
            if self._node_label_nums is None:
                self._node_label_nums = {}
                for node_label in self._node_labels:
                    self._node_label_nums[node_label] = self._get_node_label_num(node_label)
            infos['node_label_nums'] = self._node_label_nums
        if 'edge_label_dim' in keys:
            if self._edge_label_dim is None:
                self._edge_label_dim = self._get_edge_label_dim()
            infos['edge_label_dim'] = self._edge_label_dim
        if 'edge_label_nums' in keys:
            if self._edge_label_nums is None:
                self._edge_label_nums = {}
                for edge_label in self._edge_labels:
                    self._edge_label_nums[edge_label] = self._get_edge_label_num(edge_label)
            infos['edge_label_nums'] = self._edge_label_nums

        if 'directed' in keys or 'substructures' in keys:
            if self._directed is None:
                self._directed = self._is_directed()
            infos['directed'] = self._directed

        # node degree
        if any(i in keys for i in ['ave_node_degree', 'max_node_degree', 'min_node_degree']):
            all_node_degrees = self._get_all_node_degrees()
        if 'ave_node_degree' in keys:
            if self._ave_node_degree is None:
                self._ave_node_degree = self._get_ave_node_degree(all_node_degrees)
            infos['ave_node_degree'] = self._ave_node_degree
        if 'max_node_degree' in keys:
            if self._max_node_degree is None:
                self._max_node_degree = self._get_max_node_degree(all_node_degrees)
            infos['max_node_degree'] = self._max_node_degree
        if 'min_node_degree' in keys:
            if self._min_node_degree is None:
                self._min_node_degree = self._get_min_node_degree(all_node_degrees)
            infos['min_node_degree'] = self._min_node_degree

        # fill factor
        if any(i in keys for i in ['ave_fill_factor', 'max_fill_factor', 'min_fill_factor']):
            all_fill_factors = self._get_all_fill_factors()
        if 'ave_fill_factor' in keys:
            if self._ave_fill_factor is None:
                self._ave_fill_factor = self._get_ave_fill_factor(all_fill_factors)
            infos['ave_fill_factor'] = self._ave_fill_factor
        if 'max_fill_factor' in keys:
            if self._max_fill_factor is None:
                self._max_fill_factor = self._get_max_fill_factor(all_fill_factors)
            infos['max_fill_factor'] = self._max_fill_factor
        if 'min_fill_factor' in keys:
            if self._min_fill_factor is None:
                self._min_fill_factor = self._get_min_fill_factor(all_fill_factors)
            infos['min_fill_factor'] = self._min_fill_factor

        if 'substructures' in keys:
            if self._substructures is None:
                self._substructures = self._get_substructures()
            infos['substructures'] = self._substructures

        if 'class_number' in keys:
            if self._class_number is None:
                self._class_number = self._get_class_number()
            infos['class_number'] = self._class_number

        if 'node_attr_dim' in keys:
            if self._node_attr_dim is None:
                self._node_attr_dim = self._get_node_attr_dim()
            infos['node_attr_dim'] = self._node_attr_dim

        if 'edge_attr_dim' in keys:
            if self._edge_attr_dim is None:
                self._edge_attr_dim = self._get_edge_attr_dim()
            infos['edge_attr_dim'] = self._edge_attr_dim

        # entropy of degree distribution
        if 'all_degree_entropy' in keys:
            if params is not None and ('all_degree_entropy' in params) and ('base' in params['all_degree_entropy']):
                base = params['all_degree_entropy']['base']
            else:
                base = None
            infos['all_degree_entropy'] = self._compute_all_degree_entropy(base=base)

        if 'ave_degree_entropy' in keys:
            if params is not None and ('ave_degree_entropy' in params) and ('base' in params['ave_degree_entropy']):
                base = params['ave_degree_entropy']['base']
            else:
                base = None
            infos['ave_degree_entropy'] = np.mean(self._compute_all_degree_entropy(base=base))

        return infos

    def print_graph_infos(self, infos):
        from collections import OrderedDict
        keys = list(infos.keys())
        print(OrderedDict(sorted(infos.items(), key=lambda i: keys.index(i[0]))))

    def remove_labels(self, node_labels=[], edge_labels=[], node_attrs=[], edge_attrs=[]):
        node_labels = [item for item in node_labels if item in self._node_labels]
        edge_labels = [item for item in edge_labels if item in self._edge_labels]
        node_attrs = [item for item in node_attrs if item in self._node_attrs]
        edge_attrs = [item for item in edge_attrs if item in self._edge_attrs]
        for g in self._graphs:
            for nd in g.nodes():
                for nl in node_labels:
                    del g.nodes[nd][nl]
                for na in node_attrs:
                    del g.nodes[nd][na]
            for ed in g.edges():
                for el in edge_labels:
                    del g.edges[ed][el]
                for ea in edge_attrs:
                    del g.edges[ed][ea]
        if len(node_labels) > 0:
            self._node_labels = [nl for nl in self._node_labels if nl not in node_labels]
        if len(edge_labels) > 0:
            self._edge_labels = [el for el in self._edge_labels if el not in edge_labels]
        if len(node_attrs) > 0:
            self._node_attrs = [na for na in self._node_attrs if na not in node_attrs]
        if len(edge_attrs) > 0:
            self._edge_attrs = [ea for ea in self._edge_attrs if ea not in edge_attrs]

    def clean_labels(self):
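        # Drop node/edge labels and attributes that take at most one distinct
        # value over the whole dataset: such names carry no information, so they
        # are deleted from every graph and removed from the corresponding
        # label-name lists.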
        labels = []
        for name in self._node_labels:
            label = set()
            for G in self._graphs:
                label = label | set(nx.get_node_attributes(G, name).values())
                if len(label) > 1:
                    labels.append(name)
                    break
            if len(label) < 2:
                for G in self._graphs:
                    for nd in G.nodes():
                        del G.nodes[nd][name]
        self._node_labels = labels

        labels = []
        for name in self._edge_labels:
            label = set()
            for G in self._graphs:
                label = label | set(nx.get_edge_attributes(G, name).values())
                if len(label) > 1:
                    labels.append(name)
                    break
            if len(label) < 2:
                for G in self._graphs:
                    for ed in G.edges():
                        del G.edges[ed][name]
        self._edge_labels = labels

        labels = []
        for name in self._node_attrs:
            label = set()
            for G in self._graphs:
                label = label | set(nx.get_node_attributes(G, name).values())
                if len(label) > 1:
                    labels.append(name)
                    break
            if len(label) < 2:
                for G in self._graphs:
                    for nd in G.nodes():
                        del G.nodes[nd][name]
        self._node_attrs = labels

        labels = []
        for name in self._edge_attrs:
            label = set()
            for G in self._graphs:
                label = label | set(nx.get_edge_attributes(G, name).values())
                if len(label) > 1:
                    labels.append(name)
                    break
            if len(label) < 2:
                for G in self._graphs:
                    for ed in G.edges():
                        del G.edges[ed][name]
        self._edge_attrs = labels

    def cut_graphs(self, range_):
        self._graphs = [self._graphs[i] for i in range_]
        if self._targets is not None:
            self._targets = [self._targets[i] for i in range_]
        self.clean_labels()

    def trim_dataset(self, edge_required=False):
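        # Keep only graphs that have at least one node (and, if `edge_required`,
        # at least one edge); targets are filtered to the surviving indices.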
        if edge_required:
            trimmed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if (nx.number_of_nodes(g) != 0 and nx.number_of_edges(g) != 0)]
        else:
            trimmed_pairs = [(idx, g) for idx, g in enumerate(self._graphs) if nx.number_of_nodes(g) != 0]
        idx = [p[0] for p in trimmed_pairs]
        self._graphs = [p[1] for p in trimmed_pairs]
        if self._targets is not None:
            self._targets = [self._targets[i] for i in idx]
        self.clean_labels()

    def copy(self):
        dataset = Dataset()
        graphs = [g.copy() for g in self._graphs] if self._graphs is not None else None
        target = self._targets.copy() if self._targets is not None else None
        node_labels = self._node_labels.copy() if self._node_labels is not None else None
        node_attrs = self._node_attrs.copy() if self._node_attrs is not None else None
        edge_labels = self._edge_labels.copy() if self._edge_labels is not None else None
        edge_attrs = self._edge_attrs.copy() if self._edge_attrs is not None else None
        dataset.load_graphs(graphs, target)
        dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
        # @todo: clean_labels and add other class members?
        return dataset

    def check_special_suffices(self):
        if self._ds_name.endswith('_unlabeled'):
            self.remove_labels(node_labels=self._node_labels,
                               edge_labels=self._edge_labels,
                               node_attrs=self._node_attrs,
                               edge_attrs=self._edge_attrs)

    def get_all_node_labels(self):
        node_labels = []
        for g in self._graphs:
            for n in g.nodes():
                nl = tuple(g.nodes[n].items())
                if nl not in node_labels:
                    node_labels.append(nl)
        return node_labels

    def get_all_edge_labels(self):
        edge_labels = []
        for g in self._graphs:
            for e in g.edges():
                el = tuple(g.edges[e].items())
                if el not in edge_labels:
                    edge_labels.append(el)
        return edge_labels

    def _get_dataset_size(self):
        return len(self._graphs)

    def _get_all_node_nums(self):
        return [nx.number_of_nodes(G) for G in self._graphs]

    def _get_total_node_num(self, all_node_nums):
        return np.sum(all_node_nums)

    def _get_ave_node_num(self, all_node_nums):
        return np.mean(all_node_nums)

    def _get_min_node_num(self, all_node_nums):
        return np.amin(all_node_nums)

    def _get_max_node_num(self, all_node_nums):
        return np.amax(all_node_nums)

    def _get_all_edge_nums(self):
        return [nx.number_of_edges(G) for G in self._graphs]

    def _get_total_edge_num(self, all_edge_nums):
        return np.sum(all_edge_nums)

    def _get_ave_edge_num(self, all_edge_nums):
        return np.mean(all_edge_nums)

    def _get_min_edge_num(self, all_edge_nums):
        return np.amin(all_edge_nums)

    def _get_max_edge_num(self, all_edge_nums):
        return np.amax(all_edge_nums)

    def _get_node_label_dim(self):
        return len(self._node_labels)

    def _get_node_label_num(self, node_label):
        nl = set()
        for G in self._graphs:
            nl = nl | set(nx.get_node_attributes(G, node_label).values())
        return len(nl)

    def _get_edge_label_dim(self):
        return len(self._edge_labels)

    def _get_edge_label_num(self, edge_label):
        el = set()
        for G in self._graphs:
            el = el | set(nx.get_edge_attributes(G, edge_label).values())
        return len(el)

    def _is_directed(self):
        return nx.is_directed(self._graphs[0])

    def _get_all_node_degrees(self):
        return [np.mean(list(dict(G.degree()).values())) for G in self._graphs]

    def _get_ave_node_degree(self, all_node_degrees):
        return np.mean(all_node_degrees)

    def _get_max_node_degree(self, all_node_degrees):
        return np.amax(all_node_degrees)

    def _get_min_node_degree(self, all_node_degrees):
        return np.amin(all_node_degrees)

    def _get_all_fill_factors(self):
        """Get the fill factor of each graph, i.e. the ratio of its number of
        edges to its squared number of nodes
        (number_of_edges / number_of_nodes ** 2).

        Returns
        -------
        list[float]
            List of fill factors of all graphs.
        """
        return [nx.number_of_edges(G) / (nx.number_of_nodes(G) ** 2) for G in self._graphs]

    def _get_ave_fill_factor(self, all_fill_factors):
        return np.mean(all_fill_factors)

    def _get_max_fill_factor(self, all_fill_factors):
        return np.amax(all_fill_factors)

    def _get_min_fill_factor(self, all_fill_factors):
        return np.amin(all_fill_factors)

    def _get_substructures(self):
        subs = set()
        for G in self._graphs:
            degrees = list(dict(G.degree()).values())
            if any(i == 2 for i in degrees):
                subs.add('linear')
            if np.amax(degrees) >= 3:
                subs.add('non linear')
            if 'linear' in subs and 'non linear' in subs:
                break

        if self._directed:
            for G in self._graphs:
                # nx.find_cycle raises NetworkXNoCycle when the graph is acyclic.
                try:
                    if len(list(nx.find_cycle(G))) > 0:
                        subs.add('cyclic')
                        break
                except nx.NetworkXNoCycle:
                    pass
        # else:
        #     # @todo: this method does not work for big graphs with a large
        #     # number of edges like D&D; try a better way.
        #     upper = np.amin([nx.number_of_edges(G) for G in Gn]) * 2 + 10
        #     for G in Gn:
        #         if (nx.number_of_edges(G) < upper):
        #             cyc = list(nx.simple_cycles(G.to_directed()))
        #             if any(len(i) > 2 for i in cyc):
        #                 subs.add('cyclic')
        #                 break
        #     if 'cyclic' not in subs:
        #         for G in Gn:
        #             cyc = list(nx.simple_cycles(G.to_directed()))
        #             if any(len(i) > 2 for i in cyc):
        #                 subs.add('cyclic')
        #                 break

        return subs

    def _get_class_number(self):
        return len(set(self._targets))

    def _get_node_attr_dim(self):
        return len(self._node_attrs)

    def _get_edge_attr_dim(self):
        return len(self._edge_attrs)

    def _compute_all_degree_entropy(self, base=None):
        """Compute the entropy of the degree distribution of each graph.

        Parameters
        ----------
        base : float, optional
            The logarithmic base to use. The default is ``e`` (natural logarithm).

        Returns
        -------
        degree_entropy : list of float
            The computed entropy of the degree distribution of each graph.
        """
        from gklearn.utils.stats import entropy
        degree_entropy = []
        for g in self._graphs:
            degrees = list(dict(g.degree()).values())
            en = entropy(degrees, base=base)
            degree_entropy.append(en)
        return degree_entropy

    @property
    def graphs(self):
        return self._graphs

    @property
    def targets(self):
        return self._targets

    @property
    def node_labels(self):
        return self._node_labels

    @property
    def edge_labels(self):
        return self._edge_labels

    @property
    def node_attrs(self):
        return self._node_attrs

    @property
    def edge_attrs(self):
        return self._edge_attrs


def split_dataset_by_target(dataset):
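    # Split the given Dataset into one sub-Dataset per distinct target value;
    # each sub-dataset receives copies of the label/attribute name lists.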
    from gklearn.preimage.utils import get_same_item_indices
    graphs = dataset.graphs
    targets = dataset.targets
    datasets = []
    idx_targets = get_same_item_indices(targets)
    for key, val in idx_targets.items():
        sub_graphs = [graphs[i] for i in val]
        sub_dataset = Dataset()
        sub_dataset.load_graphs(sub_graphs, [key] * len(val))
        node_labels = dataset.node_labels.copy() if dataset.node_labels is not None else None
        node_attrs = dataset.node_attrs.copy() if dataset.node_attrs is not None else None
        edge_labels = dataset.edge_labels.copy() if dataset.edge_labels is not None else None
        edge_attrs = dataset.edge_attrs.copy() if dataset.edge_attrs is not None else None
        sub_dataset.set_labels(node_labels=node_labels, node_attrs=node_attrs, edge_labels=edge_labels, edge_attrs=edge_attrs)
        datasets.append(sub_dataset)
        # @todo: clean_labels?
    return datasets
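

# Usage sketch for split_dataset_by_target (assumes `ds` holds a classification
# dataset with discrete class targets, e.g. ds = Dataset('MUTAG')):
#
#   subsets = split_dataset_by_target(ds)
#   for sub in subsets:
#       print(len(sub.graphs), sub.targets[0])  # size and class of each subset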

A Python package for graph kernels, graph edit distances, and the graph pre-image problem.