You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

test_graph_kernels.py 19 kB

5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486
  1. """Tests of graph kernels.
  2. """
  3. import pytest
  4. import multiprocessing
  5. import numpy as np
  6. def chooseDataset(ds_name):
  7. """Choose dataset according to name.
  8. """
  9. from gklearn.utils import Dataset
  10. dataset = Dataset()
  11. # no node labels (and no edge labels).
  12. if ds_name == 'Alkane':
  13. dataset.load_predefined_dataset(ds_name)
  14. dataset.trim_dataset(edge_required=False)
  15. irrelevant_labels = {'node_attrs': ['x', 'y', 'z'], 'edge_labels': ['bond_stereo']}
  16. dataset.remove_labels(**irrelevant_labels)
  17. # node symbolic labels.
  18. elif ds_name == 'Acyclic':
  19. dataset.load_predefined_dataset(ds_name)
  20. dataset.trim_dataset(edge_required=False)
  21. irrelevant_labels = {'node_attrs': ['x', 'y', 'z'], 'edge_labels': ['bond_stereo']}
  22. dataset.remove_labels(**irrelevant_labels)
  23. # node non-symbolic labels.
  24. elif ds_name == 'Letter-med':
  25. dataset.load_predefined_dataset(ds_name)
  26. dataset.trim_dataset(edge_required=False)
  27. # node symbolic and non-symbolic labels (and edge symbolic labels).
  28. elif ds_name == 'AIDS':
  29. dataset.load_predefined_dataset(ds_name)
  30. dataset.trim_dataset(edge_required=False)
  31. # edge non-symbolic labels (no node labels).
  32. elif ds_name == 'Fingerprint_edge':
  33. dataset.load_predefined_dataset('Fingerprint')
  34. dataset.trim_dataset(edge_required=True)
  35. irrelevant_labels = {'edge_attrs': ['orient', 'angle']}
  36. dataset.remove_labels(**irrelevant_labels)
  37. # edge non-symbolic labels (and node non-symbolic labels).
  38. elif ds_name == 'Fingerprint':
  39. dataset.load_predefined_dataset(ds_name)
  40. dataset.trim_dataset(edge_required=True)
  41. # edge symbolic and non-symbolic labels (and node symbolic and non-symbolic labels).
  42. elif ds_name == 'Cuneiform':
  43. dataset.load_predefined_dataset(ds_name)
  44. dataset.trim_dataset(edge_required=True)
  45. dataset.cut_graphs(range(0, 3))
  46. return dataset
  47. def test_list_graph_kernels():
  48. """
  49. """
  50. from gklearn.kernels import GRAPH_KERNELS, list_of_graph_kernels
  51. assert list_of_graph_kernels() == [i for i in GRAPH_KERNELS]
  52. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  53. @pytest.mark.parametrize('weight,compute_method', [(0.01, 'geo'), (1, 'exp')])
  54. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  55. def test_CommonWalk(ds_name, parallel, weight, compute_method):
  56. """Test common walk kernel.
  57. """
  58. from gklearn.kernels import CommonWalk
  59. import networkx as nx
  60. dataset = chooseDataset(ds_name)
  61. dataset.load_graphs([g for g in dataset.graphs if nx.number_of_nodes(g) > 1])
  62. try:
  63. graph_kernel = CommonWalk(node_labels=dataset.node_labels,
  64. edge_labels=dataset.edge_labels,
  65. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  66. weight=weight,
  67. compute_method=compute_method)
  68. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  69. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  70. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  71. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  72. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  73. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  74. except Exception as exception:
  75. assert False, exception
  76. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  77. @pytest.mark.parametrize('remove_totters', [False]) #[True, False])
  78. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  79. def test_Marginalized(ds_name, parallel, remove_totters):
  80. """Test marginalized kernel.
  81. """
  82. from gklearn.kernels import Marginalized
  83. dataset = chooseDataset(ds_name)
  84. try:
  85. graph_kernel = Marginalized(node_labels=dataset.node_labels,
  86. edge_labels=dataset.edge_labels,
  87. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  88. p_quit=0.5,
  89. n_iteration=2,
  90. remove_totters=remove_totters)
  91. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  92. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  93. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  94. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  95. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  96. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  97. except Exception as exception:
  98. assert False, exception
  99. @pytest.mark.parametrize('ds_name', ['Acyclic'])
  100. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  101. def test_SylvesterEquation(ds_name, parallel):
  102. """Test sylvester equation kernel.
  103. """
  104. from gklearn.kernels import SylvesterEquation
  105. dataset = chooseDataset(ds_name)
  106. try:
  107. graph_kernel = SylvesterEquation(
  108. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  109. weight=1e-3,
  110. p=None,
  111. q=None,
  112. edge_weight=None)
  113. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  114. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  115. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  116. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  117. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  118. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  119. except Exception as exception:
  120. assert False, exception
  121. @pytest.mark.parametrize('ds_name', ['Acyclic', 'AIDS'])
  122. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  123. def test_ConjugateGradient(ds_name, parallel):
  124. """Test conjugate gradient kernel.
  125. """
  126. from gklearn.kernels import ConjugateGradient
  127. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  128. import functools
  129. dataset = chooseDataset(ds_name)
  130. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  131. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  132. try:
  133. graph_kernel = ConjugateGradient(
  134. node_labels=dataset.node_labels,
  135. node_attrs=dataset.node_attrs,
  136. edge_labels=dataset.edge_labels,
  137. edge_attrs=dataset.edge_attrs,
  138. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  139. weight=1e-3,
  140. p=None,
  141. q=None,
  142. edge_weight=None,
  143. node_kernels=sub_kernels,
  144. edge_kernels=sub_kernels)
  145. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  146. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  147. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  148. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  149. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  150. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  151. except Exception as exception:
  152. assert False, exception
  153. @pytest.mark.parametrize('ds_name', ['Acyclic', 'AIDS'])
  154. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  155. def test_FixedPoint(ds_name, parallel):
  156. """Test fixed point kernel.
  157. """
  158. from gklearn.kernels import FixedPoint
  159. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  160. import functools
  161. dataset = chooseDataset(ds_name)
  162. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  163. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  164. try:
  165. graph_kernel = FixedPoint(
  166. node_labels=dataset.node_labels,
  167. node_attrs=dataset.node_attrs,
  168. edge_labels=dataset.edge_labels,
  169. edge_attrs=dataset.edge_attrs,
  170. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  171. weight=1e-3,
  172. p=None,
  173. q=None,
  174. edge_weight=None,
  175. node_kernels=sub_kernels,
  176. edge_kernels=sub_kernels)
  177. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  178. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  179. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  180. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  181. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  182. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  183. except Exception as exception:
  184. assert False, exception
  185. @pytest.mark.parametrize('ds_name', ['Acyclic'])
  186. @pytest.mark.parametrize('sub_kernel', ['exp', 'geo'])
  187. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  188. def test_SpectralDecomposition(ds_name, sub_kernel, parallel):
  189. """Test spectral decomposition kernel.
  190. """
  191. from gklearn.kernels import SpectralDecomposition
  192. dataset = chooseDataset(ds_name)
  193. try:
  194. graph_kernel = SpectralDecomposition(
  195. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  196. weight=1e-3,
  197. p=None,
  198. q=None,
  199. edge_weight=None,
  200. sub_kernel=sub_kernel)
  201. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  202. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  203. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  204. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  205. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  206. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  207. except Exception as exception:
  208. assert False, exception
  209. # @pytest.mark.parametrize(
  210. # 'compute_method,ds_name,sub_kernel',
  211. # [
  212. # ('sylvester', 'Alkane', None),
  213. # ('conjugate', 'Alkane', None),
  214. # ('conjugate', 'AIDS', None),
  215. # ('fp', 'Alkane', None),
  216. # ('fp', 'AIDS', None),
  217. # ('spectral', 'Alkane', 'exp'),
  218. # ('spectral', 'Alkane', 'geo'),
  219. # ]
  220. # )
  221. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  222. # def test_RandomWalk(ds_name, compute_method, sub_kernel, parallel):
  223. # """Test random walk kernel.
  224. # """
  225. # from gklearn.kernels import RandomWalk
  226. # from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  227. # import functools
  228. #
  229. # dataset = chooseDataset(ds_name)
  230. # mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  231. # sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  232. # # try:
  233. # graph_kernel = RandomWalk(node_labels=dataset.node_labels,
  234. # node_attrs=dataset.node_attrs,
  235. # edge_labels=dataset.edge_labels,
  236. # edge_attrs=dataset.edge_attrs,
  237. # ds_infos=dataset.get_dataset_infos(keys=['directed']),
  238. # compute_method=compute_method,
  239. # weight=1e-3,
  240. # p=None,
  241. # q=None,
  242. # edge_weight=None,
  243. # node_kernels=sub_kernels,
  244. # edge_kernels=sub_kernels,
  245. # sub_kernel=sub_kernel)
  246. # gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  247. # parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  248. # kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  249. # parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  250. # kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  251. # parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  252. # except Exception as exception:
  253. # assert False, exception
  254. @pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint'])
  255. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  256. def test_ShortestPath(ds_name, parallel):
  257. """Test shortest path kernel.
  258. """
  259. from gklearn.kernels import ShortestPath
  260. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  261. import functools
  262. dataset = chooseDataset(ds_name)
  263. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  264. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  265. try:
  266. graph_kernel = ShortestPath(node_labels=dataset.node_labels,
  267. node_attrs=dataset.node_attrs,
  268. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  269. fcsp=True,
  270. node_kernels=sub_kernels)
  271. gram_matrix1, run_time = graph_kernel.compute(dataset.graphs,
  272. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  273. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  274. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  275. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  276. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  277. graph_kernel = ShortestPath(node_labels=dataset.node_labels,
  278. node_attrs=dataset.node_attrs,
  279. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  280. fcsp=False,
  281. node_kernels=sub_kernels)
  282. gram_matrix2, run_time = graph_kernel.compute(dataset.graphs,
  283. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  284. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  285. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  286. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  287. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  288. assert np.array_equal(gram_matrix1, gram_matrix2)
  289. except Exception as exception:
  290. assert False, exception
  291. #@pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint'])
  292. @pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint', 'Fingerprint_edge', 'Cuneiform'])
  293. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  294. def test_StructuralSP(ds_name, parallel):
  295. """Test structural shortest path kernel.
  296. """
  297. from gklearn.kernels import StructuralSP
  298. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  299. import functools
  300. dataset = chooseDataset(ds_name)
  301. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  302. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  303. try:
  304. graph_kernel = StructuralSP(node_labels=dataset.node_labels,
  305. edge_labels=dataset.edge_labels,
  306. node_attrs=dataset.node_attrs,
  307. edge_attrs=dataset.edge_attrs,
  308. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  309. fcsp=True,
  310. node_kernels=sub_kernels,
  311. edge_kernels=sub_kernels)
  312. gram_matrix1, run_time = graph_kernel.compute(dataset.graphs,
  313. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  314. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  315. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  316. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  317. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  318. graph_kernel = StructuralSP(node_labels=dataset.node_labels,
  319. edge_labels=dataset.edge_labels,
  320. node_attrs=dataset.node_attrs,
  321. edge_attrs=dataset.edge_attrs,
  322. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  323. fcsp=False,
  324. node_kernels=sub_kernels,
  325. edge_kernels=sub_kernels)
  326. gram_matrix2, run_time = graph_kernel.compute(dataset.graphs,
  327. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  328. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  329. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  330. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  331. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  332. assert np.array_equal(gram_matrix1, gram_matrix2)
  333. except Exception as exception:
  334. assert False, exception
  335. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  336. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  337. #@pytest.mark.parametrize('k_func', ['MinMax', 'tanimoto', None])
  338. @pytest.mark.parametrize('k_func', ['MinMax', 'tanimoto'])
  339. @pytest.mark.parametrize('compute_method', ['trie', 'naive'])
  340. def test_PathUpToH(ds_name, parallel, k_func, compute_method):
  341. """Test path kernel up to length $h$.
  342. """
  343. from gklearn.kernels import PathUpToH
  344. dataset = chooseDataset(ds_name)
  345. try:
  346. graph_kernel = PathUpToH(node_labels=dataset.node_labels,
  347. edge_labels=dataset.edge_labels,
  348. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  349. depth=2, k_func=k_func, compute_method=compute_method)
  350. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  351. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  352. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  353. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  354. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  355. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  356. except Exception as exception:
  357. assert False, exception
  358. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  359. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  360. def test_Treelet(ds_name, parallel):
  361. """Test treelet kernel.
  362. """
  363. from gklearn.kernels import Treelet
  364. from gklearn.utils.kernels import polynomialkernel
  365. import functools
  366. dataset = chooseDataset(ds_name)
  367. pkernel = functools.partial(polynomialkernel, d=2, c=1e5)
  368. try:
  369. graph_kernel = Treelet(node_labels=dataset.node_labels,
  370. edge_labels=dataset.edge_labels,
  371. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  372. sub_kernel=pkernel)
  373. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  374. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  375. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  376. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  377. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  378. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  379. except Exception as exception:
  380. assert False, exception
  381. @pytest.mark.parametrize('ds_name', ['Acyclic'])
  382. #@pytest.mark.parametrize('base_kernel', ['subtree', 'sp', 'edge'])
  383. # @pytest.mark.parametrize('base_kernel', ['subtree'])
  384. @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  385. def test_WLSubtree(ds_name, parallel):
  386. """Test Weisfeiler-Lehman subtree kernel.
  387. """
  388. from gklearn.kernels import WLSubtree
  389. dataset = chooseDataset(ds_name)
  390. try:
  391. graph_kernel = WLSubtree(node_labels=dataset.node_labels,
  392. edge_labels=dataset.edge_labels,
  393. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  394. height=2)
  395. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  396. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  397. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  398. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  399. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  400. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  401. except Exception as exception:
  402. assert False, exception
  403. if __name__ == "__main__":
  404. test_list_graph_kernels()
  405. # test_spkernel('Alkane', 'imap_unordered')
  406. # test_StructuralSP('Fingerprint_edge', 'imap_unordered')
  407. test_StructuralSP('Acyclic', 'imap_unordered')
  408. # test_WLSubtree('Acyclic', 'imap_unordered')
  409. # test_RandomWalk('Acyclic', 'sylvester', None, 'imap_unordered')
  410. # test_RandomWalk('Acyclic', 'conjugate', None, 'imap_unordered')
  411. # test_RandomWalk('Acyclic', 'fp', None, None)
  412. # test_RandomWalk('Acyclic', 'spectral', 'exp', 'imap_unordered')

A Python package for graph kernels, graph edit distances and graph pre-image problem.