
dataset.py 19 kB

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 26 18:48:27 2020

@author: ljia
"""
import numpy as np
import networkx as nx
from gklearn.utils.graphfiles import loadDataset
import os

class Dataset(object):

    def __init__(self, filename=None, filename_y=None, extra_params=None):
        if filename is None:
            self.__graphs = None
            self.__targets = None
            self.__node_labels = None
            self.__edge_labels = None
            self.__node_attrs = None
            self.__edge_attrs = None
        else:
            self.load_dataset(filename, filename_y=filename_y, extra_params=extra_params)

        self.__substructures = None
        self.__node_label_dim = None
        self.__edge_label_dim = None
        self.__directed = None
        self.__dataset_size = None
        self.__total_node_num = None
        self.__ave_node_num = None
        self.__min_node_num = None
        self.__max_node_num = None
        self.__total_edge_num = None
        self.__ave_edge_num = None
        self.__min_edge_num = None
        self.__max_edge_num = None
        self.__ave_node_degree = None
        self.__min_node_degree = None
        self.__max_node_degree = None
        self.__ave_fill_factor = None
        self.__min_fill_factor = None
        self.__max_fill_factor = None
        self.__node_label_nums = None
        self.__edge_label_nums = None
        self.__node_attr_dim = None
        self.__edge_attr_dim = None
        self.__class_number = None

    def load_dataset(self, filename, filename_y=None, extra_params=None):
        self.__graphs, self.__targets = loadDataset(filename, filename_y=filename_y, extra_params=extra_params)
        self.set_labels_attrs()

    def load_graphs(self, graphs, targets=None):
        # This has to be followed by set_labels().
        self.__graphs = graphs
        self.__targets = targets
        # self.set_labels_attrs()

    def load_predefined_dataset(self, ds_name):
        current_path = os.path.dirname(os.path.realpath(__file__)) + '/'
        if ds_name == 'Letter-high': # node non-symb
            ds_file = current_path + '../../datasets/Letter-high/Letter-high_A.txt'
            self.__graphs, self.__targets = loadDataset(ds_file)
        elif ds_name == 'Letter-med': # node non-symb
            ds_file = current_path + '../../datasets/Letter-med/Letter-med_A.txt'
            self.__graphs, self.__targets = loadDataset(ds_file)
        elif ds_name == 'Letter-low': # node non-symb
            ds_file = current_path + '../../datasets/Letter-low/Letter-low_A.txt'
            self.__graphs, self.__targets = loadDataset(ds_file)
        elif ds_name == 'Fingerprint':
            ds_file = current_path + '../../datasets/Fingerprint/Fingerprint_A.txt'
            self.__graphs, self.__targets = loadDataset(ds_file)
        elif ds_name == 'SYNTHETIC':
            pass  # not implemented.
        elif ds_name == 'SYNTHETICnew':
            ds_file = current_path + '../../datasets/SYNTHETICnew/SYNTHETICnew_A.txt'
            self.__graphs, self.__targets = loadDataset(ds_file)
        elif ds_name == 'Synthie':
            pass  # not implemented.
        elif ds_name == 'COIL-DEL':
            ds_file = current_path + '../../datasets/COIL-DEL/COIL-DEL_A.txt'
            self.__graphs, self.__targets = loadDataset(ds_file)
        elif ds_name == 'COIL-RAG':
            pass  # not implemented.
        elif ds_name == 'COLORS-3':
            pass  # not implemented.
        elif ds_name == 'FRANKENSTEIN':
            pass  # not implemented.
        self.set_labels_attrs()

    def set_labels(self, node_labels=[], node_attrs=[], edge_labels=[], edge_attrs=[]):
        self.__node_labels = node_labels
        self.__node_attrs = node_attrs
        self.__edge_labels = edge_labels
        self.__edge_attrs = edge_attrs

    def set_labels_attrs(self, node_labels=None, node_attrs=None, edge_labels=None, edge_attrs=None):
        # @todo: remove labels which have only one possible value.
        if node_labels is None:
            self.__node_labels = self.__graphs[0].graph['node_labels']
            # # Graphs are considered node-unlabeled if all nodes have the same label.
            # infos.update({'node_labeled': is_nl if node_label_num > 1 else False})
        if node_attrs is None:
            self.__node_attrs = self.__graphs[0].graph['node_attrs']
            # for G in Gn:
            #     for n in G.nodes(data=True):
            #         if 'attributes' in n[1]:
            #             return len(n[1]['attributes'])
            # return 0
        if edge_labels is None:
            self.__edge_labels = self.__graphs[0].graph['edge_labels']
            # # Graphs are considered edge-unlabeled if all edges have the same label.
            # infos.update({'edge_labeled': is_el if edge_label_num > 1 else False})
        if edge_attrs is None:
            self.__edge_attrs = self.__graphs[0].graph['edge_attrs']
            # for G in Gn:
            #     if nx.number_of_edges(G) > 0:
            #         for e in G.edges(data=True):
            #             if 'attributes' in e[2]:
            #                 return len(e[2]['attributes'])
            # return 0

    def get_dataset_infos(self, keys=None):
        """Computes and returns the structure and property information of the graph dataset.

        Parameters
        ----------
        keys : list
            List of strings indicating which pieces of information will be returned.
            The possible choices include:

            'substructures': substructures contained in the graphs, including
                'linear', 'non linear' and 'cyclic'.
            'node_label_dim': number of symbolic vertex labels.
            'edge_label_dim': number of symbolic edge labels.
            'directed': whether the graphs in the dataset are directed.
            'dataset_size': number of graphs in the dataset.
            'total_node_num': total number of vertices of all graphs in the dataset.
            'ave_node_num': average number of vertices of the graphs in the dataset.
            'min_node_num': minimum number of vertices of the graphs in the dataset.
            'max_node_num': maximum number of vertices of the graphs in the dataset.
            'total_edge_num': total number of edges of all graphs in the dataset.
            'ave_edge_num': average number of edges of the graphs in the dataset.
            'min_edge_num': minimum number of edges of the graphs in the dataset.
            'max_edge_num': maximum number of edges of the graphs in the dataset.
            'ave_node_degree': average vertex degree of the graphs in the dataset.
            'min_node_degree': minimum vertex degree of the graphs in the dataset.
            'max_node_degree': maximum vertex degree of the graphs in the dataset.
            'ave_fill_factor': average fill factor (number_of_edges /
                (number_of_nodes ** 2)) of the graphs in the dataset.
            'min_fill_factor': minimum fill factor of the graphs in the dataset.
            'max_fill_factor': maximum fill factor of the graphs in the dataset.
            'node_label_nums': numbers of symbolic vertex labels of the graphs,
                keyed by label name.
            'edge_label_nums': numbers of symbolic edge labels of the graphs,
                keyed by label name.
            'node_attr_dim': number of dimensions of non-symbolic vertex labels,
                extracted from the 'attributes' attribute of graph nodes.
            'edge_attr_dim': number of dimensions of non-symbolic edge labels,
                extracted from the 'attributes' attribute of graph edges.
            'class_number': number of classes. Only available for classification
                problems.

            All information above is returned if `keys` is not given.

        Returns
        -------
        dict
            Information of the graph dataset, keyed by `keys`.
        """
        infos = {}

        if keys is None:
            keys = [
                'substructures',
                'node_label_dim',
                'edge_label_dim',
                'directed',
                'dataset_size',
                'total_node_num',
                'ave_node_num',
                'min_node_num',
                'max_node_num',
                'total_edge_num',
                'ave_edge_num',
                'min_edge_num',
                'max_edge_num',
                'ave_node_degree',
                'min_node_degree',
                'max_node_degree',
                'ave_fill_factor',
                'min_fill_factor',
                'max_fill_factor',
                'node_label_nums',
                'edge_label_nums',
                'node_attr_dim',
                'edge_attr_dim',
                'class_number',
            ]

        # dataset size
        if 'dataset_size' in keys:
            if self.__dataset_size is None:
                self.__dataset_size = self.__get_dataset_size()
            infos['dataset_size'] = self.__dataset_size

        # graph node number
        if any(i in keys for i in ['total_node_num', 'ave_node_num', 'min_node_num', 'max_node_num']):
            all_node_nums = self.__get_all_node_nums()

        if 'total_node_num' in keys:
            if self.__total_node_num is None:
                self.__total_node_num = self.__get_total_node_num(all_node_nums)
            infos['total_node_num'] = self.__total_node_num

        if 'ave_node_num' in keys:
            if self.__ave_node_num is None:
                self.__ave_node_num = self.__get_ave_node_num(all_node_nums)
            infos['ave_node_num'] = self.__ave_node_num

        if 'min_node_num' in keys:
            if self.__min_node_num is None:
                self.__min_node_num = self.__get_min_node_num(all_node_nums)
            infos['min_node_num'] = self.__min_node_num

        if 'max_node_num' in keys:
            if self.__max_node_num is None:
                self.__max_node_num = self.__get_max_node_num(all_node_nums)
            infos['max_node_num'] = self.__max_node_num

        # graph edge number
        if any(i in keys for i in ['total_edge_num', 'ave_edge_num', 'min_edge_num', 'max_edge_num']):
            all_edge_nums = self.__get_all_edge_nums()

        if 'total_edge_num' in keys:
            if self.__total_edge_num is None:
                self.__total_edge_num = self.__get_total_edge_num(all_edge_nums)
            infos['total_edge_num'] = self.__total_edge_num

        if 'ave_edge_num' in keys:
            if self.__ave_edge_num is None:
                self.__ave_edge_num = self.__get_ave_edge_num(all_edge_nums)
            infos['ave_edge_num'] = self.__ave_edge_num

        if 'max_edge_num' in keys:
            if self.__max_edge_num is None:
                self.__max_edge_num = self.__get_max_edge_num(all_edge_nums)
            infos['max_edge_num'] = self.__max_edge_num

        if 'min_edge_num' in keys:
            if self.__min_edge_num is None:
                self.__min_edge_num = self.__get_min_edge_num(all_edge_nums)
            infos['min_edge_num'] = self.__min_edge_num

        # label number
        if 'node_label_dim' in keys:
            if self.__node_label_dim is None:
                self.__node_label_dim = self.__get_node_label_dim()
            infos['node_label_dim'] = self.__node_label_dim

        if 'node_label_nums' in keys:
            if self.__node_label_nums is None:
                self.__node_label_nums = {}
                for node_label in self.__node_labels:
                    self.__node_label_nums[node_label] = self.__get_node_label_num(node_label)
            infos['node_label_nums'] = self.__node_label_nums

        if 'edge_label_dim' in keys:
            if self.__edge_label_dim is None:
                self.__edge_label_dim = self.__get_edge_label_dim()
            infos['edge_label_dim'] = self.__edge_label_dim

        if 'edge_label_nums' in keys:
            if self.__edge_label_nums is None:
                self.__edge_label_nums = {}
                for edge_label in self.__edge_labels:
                    self.__edge_label_nums[edge_label] = self.__get_edge_label_num(edge_label)
            infos['edge_label_nums'] = self.__edge_label_nums

        if 'directed' in keys or 'substructures' in keys:
            if self.__directed is None:
                self.__directed = self.__is_directed()
            infos['directed'] = self.__directed

        # node degree
        if any(i in keys for i in ['ave_node_degree', 'max_node_degree', 'min_node_degree']):
            all_node_degrees = self.__get_all_node_degrees()

        if 'ave_node_degree' in keys:
            if self.__ave_node_degree is None:
                self.__ave_node_degree = self.__get_ave_node_degree(all_node_degrees)
            infos['ave_node_degree'] = self.__ave_node_degree

        if 'max_node_degree' in keys:
            if self.__max_node_degree is None:
                self.__max_node_degree = self.__get_max_node_degree(all_node_degrees)
            infos['max_node_degree'] = self.__max_node_degree

        if 'min_node_degree' in keys:
            if self.__min_node_degree is None:
                self.__min_node_degree = self.__get_min_node_degree(all_node_degrees)
            infos['min_node_degree'] = self.__min_node_degree

        # fill factor
        if any(i in keys for i in ['ave_fill_factor', 'max_fill_factor', 'min_fill_factor']):
            all_fill_factors = self.__get_all_fill_factors()

        if 'ave_fill_factor' in keys:
            if self.__ave_fill_factor is None:
                self.__ave_fill_factor = self.__get_ave_fill_factor(all_fill_factors)
            infos['ave_fill_factor'] = self.__ave_fill_factor

        if 'max_fill_factor' in keys:
            if self.__max_fill_factor is None:
                self.__max_fill_factor = self.__get_max_fill_factor(all_fill_factors)
            infos['max_fill_factor'] = self.__max_fill_factor

        if 'min_fill_factor' in keys:
            if self.__min_fill_factor is None:
                self.__min_fill_factor = self.__get_min_fill_factor(all_fill_factors)
            infos['min_fill_factor'] = self.__min_fill_factor

        if 'substructures' in keys:
            if self.__substructures is None:
                self.__substructures = self.__get_substructures()
            infos['substructures'] = self.__substructures

        if 'class_number' in keys:
            if self.__class_number is None:
                self.__class_number = self.__get_class_number()
            infos['class_number'] = self.__class_number

        if 'node_attr_dim' in keys:
            if self.__node_attr_dim is None:
                self.__node_attr_dim = self.__get_node_attr_dim()
            infos['node_attr_dim'] = self.__node_attr_dim

        if 'edge_attr_dim' in keys:
            if self.__edge_attr_dim is None:
                self.__edge_attr_dim = self.__get_edge_attr_dim()
            infos['edge_attr_dim'] = self.__edge_attr_dim

        return infos

    def print_graph_infos(self, infos):
        from collections import OrderedDict
        keys = list(infos.keys())
        print(OrderedDict(sorted(infos.items(), key=lambda i: keys.index(i[0]))))

    def remove_labels(self, node_labels=[], edge_labels=[], node_attrs=[], edge_attrs=[]):
        for g in self.__graphs:
            for nd in g.nodes():
                for nl in node_labels:
                    del g.nodes[nd][nl]
                for na in node_attrs:
                    del g.nodes[nd][na]
            for ed in g.edges():
                for el in edge_labels:
                    del g.edges[ed][el]
                for ea in edge_attrs:
                    del g.edges[ed][ea]
        if len(node_labels) > 0:
            self.__node_labels = [nl for nl in self.__node_labels if nl not in node_labels]
        if len(edge_labels) > 0:
            self.__edge_labels = [el for el in self.__edge_labels if el not in edge_labels]
        if len(node_attrs) > 0:
            self.__node_attrs = [na for na in self.__node_attrs if na not in node_attrs]
        if len(edge_attrs) > 0:
            self.__edge_attrs = [ea for ea in self.__edge_attrs if ea not in edge_attrs]

    def cut_graphs(self, range_):
        self.__graphs = [self.__graphs[i] for i in range_]
        if self.__targets is not None:
            self.__targets = [self.__targets[i] for i in range_]
        # @todo
        # self.set_labels_attrs()

    def trim_dataset(self, edge_required=False):
        if edge_required:
            trimmed_pairs = [(idx, g) for idx, g in enumerate(self.__graphs)
                             if (nx.number_of_nodes(g) != 0 and nx.number_of_edges(g) != 0)]
        else:
            trimmed_pairs = [(idx, g) for idx, g in enumerate(self.__graphs)
                             if nx.number_of_nodes(g) != 0]
        idx = [p[0] for p in trimmed_pairs]
        self.__graphs = [p[1] for p in trimmed_pairs]
        self.__targets = [self.__targets[i] for i in idx]
        # @todo
        # self.set_labels_attrs()

    def __get_dataset_size(self):
        return len(self.__graphs)

    def __get_all_node_nums(self):
        return [nx.number_of_nodes(G) for G in self.__graphs]

    def __get_total_node_num(self, all_node_nums):
        return np.sum(all_node_nums)

    def __get_ave_node_num(self, all_node_nums):
        return np.mean(all_node_nums)

    def __get_min_node_num(self, all_node_nums):
        return np.amin(all_node_nums)

    def __get_max_node_num(self, all_node_nums):
        return np.amax(all_node_nums)

    def __get_all_edge_nums(self):
        return [nx.number_of_edges(G) for G in self.__graphs]

    def __get_total_edge_num(self, all_edge_nums):
        return np.sum(all_edge_nums)

    def __get_ave_edge_num(self, all_edge_nums):
        return np.mean(all_edge_nums)

    def __get_min_edge_num(self, all_edge_nums):
        return np.amin(all_edge_nums)

    def __get_max_edge_num(self, all_edge_nums):
        return np.amax(all_edge_nums)

    def __get_node_label_dim(self):
        return len(self.__node_labels)

    def __get_node_label_num(self, node_label):
        nl = set()
        for G in self.__graphs:
            nl = nl | set(nx.get_node_attributes(G, node_label).values())
        return len(nl)

    def __get_edge_label_dim(self):
        return len(self.__edge_labels)

    def __get_edge_label_num(self, edge_label):
        el = set()
        for G in self.__graphs:
            el = el | set(nx.get_edge_attributes(G, edge_label).values())
        return len(el)

    def __is_directed(self):
        return nx.is_directed(self.__graphs[0])

    def __get_all_node_degrees(self):
        return [np.mean(list(dict(G.degree()).values())) for G in self.__graphs]

    def __get_ave_node_degree(self, all_node_degrees):
        return np.mean(all_node_degrees)

    def __get_max_node_degree(self, all_node_degrees):
        return np.amax(all_node_degrees)

    def __get_min_node_degree(self, all_node_degrees):
        return np.amin(all_node_degrees)

    def __get_all_fill_factors(self):
        """Get the fill factor of each graph, i.e., the number of edges divided
        by the squared number of nodes (a normalized measure of how dense the
        adjacency matrix is).

        Returns
        -------
        list[float]
            List of fill factors for all graphs.
        """
        return [nx.number_of_edges(G) / (nx.number_of_nodes(G) ** 2) for G in self.__graphs]

    def __get_ave_fill_factor(self, all_fill_factors):
        return np.mean(all_fill_factors)

    def __get_max_fill_factor(self, all_fill_factors):
        return np.amax(all_fill_factors)

    def __get_min_fill_factor(self, all_fill_factors):
        return np.amin(all_fill_factors)

    def __get_substructures(self):
        subs = set()
        for G in self.__graphs:
            degrees = list(dict(G.degree()).values())
            if any(i == 2 for i in degrees):
                subs.add('linear')
            if np.amax(degrees) >= 3:
                subs.add('non linear')
            if 'linear' in subs and 'non linear' in subs:
                break

        if self.__directed:
            for G in self.__graphs:
                try:
                    # find_cycle raises NetworkXNoCycle when the graph is acyclic.
                    if len(list(nx.find_cycle(G))) > 0:
                        subs.add('cyclic')
                        break
                except nx.NetworkXNoCycle:
                    pass
        # else:
        #     # @todo: this method does not work for big graphs with a large amount of edges like D&D; try a better way.
        #     upper = np.amin([nx.number_of_edges(G) for G in Gn]) * 2 + 10
        #     for G in Gn:
        #         if (nx.number_of_edges(G) < upper):
        #             cyc = list(nx.simple_cycles(G.to_directed()))
        #             if any(len(i) > 2 for i in cyc):
        #                 subs.add('cyclic')
        #                 break
        #     if 'cyclic' not in subs:
        #         for G in Gn:
        #             cyc = list(nx.simple_cycles(G.to_directed()))
        #             if any(len(i) > 2 for i in cyc):
        #                 subs.add('cyclic')
        #                 break

        return subs

    def __get_class_number(self):
        return len(set(self.__targets))

    def __get_node_attr_dim(self):
        return len(self.__node_attrs)

    def __get_edge_attr_dim(self):
        return len(self.__edge_attrs)

    @property
    def graphs(self):
        return self.__graphs

    @property
    def targets(self):
        return self.__targets

    @property
    def node_labels(self):
        return self.__node_labels

    @property
    def edge_labels(self):
        return self.__edge_labels

    @property
    def node_attrs(self):
        return self.__node_attrs

    @property
    def edge_attrs(self):
        return self.__edge_attrs

def split_dataset_by_target(dataset):
    from gklearn.preimage.utils import get_same_item_indices

    graphs = dataset.graphs
    targets = dataset.targets
    datasets = []
    idx_targets = get_same_item_indices(targets)
    for key, val in idx_targets.items():
        sub_graphs = [graphs[i] for i in val]
        sub_dataset = Dataset()
        sub_dataset.load_graphs(sub_graphs, [key] * len(val))
        sub_dataset.set_labels(node_labels=dataset.node_labels, node_attrs=dataset.node_attrs,
                               edge_labels=dataset.edge_labels, edge_attrs=dataset.edge_attrs)
        datasets.append(sub_dataset)
    return datasets
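
For reference, a minimal usage sketch of the Dataset class and split_dataset_by_target follows. It is illustrative only: it assumes this file is importable as `dataset` (adjust the import to wherever the module lives in your installation) and that the Letter-high files exist at the relative path hard-coded in load_predefined_dataset.

# Minimal usage sketch (assumptions: the module is importable as `dataset`
# and the Letter-high dataset files are present at the bundled relative path).
from dataset import Dataset, split_dataset_by_target

ds = Dataset()
ds.load_predefined_dataset('Letter-high')

# Compute a few structural statistics and print them in request order.
infos = ds.get_dataset_infos(keys=['dataset_size', 'ave_node_num', 'ave_edge_num',
                                   'node_attr_dim', 'class_number'])
ds.print_graph_infos(infos)

# Split the dataset into one sub-dataset per target value.
sub_datasets = split_dataset_by_target(ds)
print('number of sub-datasets:', len(sub_datasets))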

A Python package for graph kernels, graph edit distances and the graph pre-image problem.