You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

test_graph_kernels.py 20 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541
  1. """Tests of graph kernels.
  2. """
  3. import pytest
  4. import multiprocessing
  5. import numpy as np
  6. ##############################################################################
  7. def test_list_graph_kernels():
  8. """
  9. """
  10. from gklearn.kernels import GRAPH_KERNELS, list_of_graph_kernels
  11. assert list_of_graph_kernels() == [i for i in GRAPH_KERNELS]
  12. ##############################################################################
  13. def chooseDataset(ds_name):
  14. """Choose dataset according to name.
  15. """
  16. from gklearn.dataset import Dataset
  17. # no node labels (and no edge labels).
  18. if ds_name == 'Alkane':
  19. dataset = Dataset('Alkane_unlabeled')
  20. dataset.trim_dataset(edge_required=False)
  21. dataset.cut_graphs(range(1, 10))
  22. # node symbolic labels.
  23. elif ds_name == 'Acyclic':
  24. dataset = Dataset('Acyclic')
  25. dataset.trim_dataset(edge_required=False)
  26. # node non-symbolic labels.
  27. elif ds_name == 'Letter-med':
  28. dataset = Dataset('Letter-med')
  29. dataset.trim_dataset(edge_required=False)
  30. # node symbolic and non-symbolic labels (and edge symbolic labels).
  31. elif ds_name == 'AIDS':
  32. dataset = Dataset('AIDS')
  33. dataset.trim_dataset(edge_required=False)
  34. # edge non-symbolic labels (no node labels).
  35. elif ds_name == 'Fingerprint_edge':
  36. dataset = Dataset('Fingerprint')
  37. dataset.trim_dataset(edge_required=True)
  38. irrelevant_labels = {'edge_attrs': ['orient', 'angle']}
  39. dataset.remove_labels(**irrelevant_labels)
  40. # edge non-symbolic labels (and node non-symbolic labels).
  41. elif ds_name == 'Fingerprint':
  42. dataset = Dataset('Fingerprint')
  43. dataset.trim_dataset(edge_required=True)
  44. # edge symbolic and non-symbolic labels (and node symbolic and non-symbolic labels).
  45. elif ds_name == 'Cuneiform':
  46. dataset = Dataset('Cuneiform')
  47. dataset.trim_dataset(edge_required=True)
  48. dataset.cut_graphs(range(0, 3))
  49. return dataset
  50. def assert_equality(compute_fun, **kwargs):
  51. """Check if outputs are the same using different methods to compute.
  52. Parameters
  53. ----------
  54. compute_fun : function
  55. The function to compute the kernel, with the same key word arguments as
  56. kwargs.
  57. **kwargs : dict
  58. The key word arguments over the grid of which the kernel results are
  59. compared.
  60. Returns
  61. -------
  62. None.
  63. """
  64. from sklearn.model_selection import ParameterGrid
  65. param_grid = ParameterGrid(kwargs)
  66. result_lists = [[], [], []]
  67. for params in list(param_grid):
  68. results = compute_fun(**params)
  69. for rs, lst in zip(results, result_lists):
  70. lst.append(rs)
  71. for lst in result_lists:
  72. for i in range(len(lst[:-1])):
  73. assert np.array_equal(lst[i], lst[i + 1])
  74. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  75. @pytest.mark.parametrize('weight,compute_method', [(0.01, 'geo'), (1, 'exp')])
  76. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  77. def test_CommonWalk(ds_name, weight, compute_method):
  78. """Test common walk kernel.
  79. """
  80. def compute(parallel=None):
  81. from gklearn.kernels import CommonWalk
  82. import networkx as nx
  83. dataset = chooseDataset(ds_name)
  84. dataset.load_graphs([g for g in dataset.graphs if nx.number_of_nodes(g) > 1])
  85. try:
  86. graph_kernel = CommonWalk(node_labels=dataset.node_labels,
  87. edge_labels=dataset.edge_labels,
  88. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  89. weight=weight,
  90. compute_method=compute_method)
  91. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  92. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  93. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  94. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  95. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  96. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  97. except Exception as exception:
  98. assert False, exception
  99. else:
  100. return gram_matrix, kernel_list, kernel
  101. assert_equality(compute, parallel=['imap_unordered', None])
  102. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  103. @pytest.mark.parametrize('remove_totters', [False]) #[True, False])
  104. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  105. def test_Marginalized(ds_name, remove_totters):
  106. """Test marginalized kernel.
  107. """
  108. def compute(parallel=None):
  109. from gklearn.kernels import Marginalized
  110. dataset = chooseDataset(ds_name)
  111. try:
  112. graph_kernel = Marginalized(node_labels=dataset.node_labels,
  113. edge_labels=dataset.edge_labels,
  114. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  115. p_quit=0.5,
  116. n_iteration=2,
  117. remove_totters=remove_totters)
  118. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  119. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  120. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  121. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  122. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  123. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  124. except Exception as exception:
  125. assert False, exception
  126. else:
  127. return gram_matrix, kernel_list, kernel
  128. assert_equality(compute, parallel=['imap_unordered', None])
  129. @pytest.mark.parametrize('ds_name', ['Acyclic'])
  130. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  131. def test_SylvesterEquation(ds_name):
  132. """Test sylvester equation kernel.
  133. """
  134. def compute(parallel=None):
  135. from gklearn.kernels import SylvesterEquation
  136. dataset = chooseDataset(ds_name)
  137. try:
  138. graph_kernel = SylvesterEquation(
  139. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  140. weight=1e-3,
  141. p=None,
  142. q=None,
  143. edge_weight=None)
  144. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  145. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  146. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  147. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  148. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  149. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  150. except Exception as exception:
  151. assert False, exception
  152. else:
  153. return gram_matrix, kernel_list, kernel
  154. assert_equality(compute, parallel=['imap_unordered', None])
  155. @pytest.mark.parametrize('ds_name', ['Acyclic', 'AIDS'])
  156. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  157. def test_ConjugateGradient(ds_name):
  158. """Test conjugate gradient kernel.
  159. """
  160. def compute(parallel=None):
  161. from gklearn.kernels import ConjugateGradient
  162. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  163. import functools
  164. dataset = chooseDataset(ds_name)
  165. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  166. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  167. try:
  168. graph_kernel = ConjugateGradient(
  169. node_labels=dataset.node_labels,
  170. node_attrs=dataset.node_attrs,
  171. edge_labels=dataset.edge_labels,
  172. edge_attrs=dataset.edge_attrs,
  173. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  174. weight=1e-3,
  175. p=None,
  176. q=None,
  177. edge_weight=None,
  178. node_kernels=sub_kernels,
  179. edge_kernels=sub_kernels)
  180. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  181. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  182. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  183. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  184. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  185. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  186. except Exception as exception:
  187. assert False, exception
  188. else:
  189. return gram_matrix, kernel_list, kernel
  190. assert_equality(compute, parallel=['imap_unordered', None])
  191. @pytest.mark.parametrize('ds_name', ['Acyclic', 'AIDS'])
  192. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  193. def test_FixedPoint(ds_name):
  194. """Test fixed point kernel.
  195. """
  196. def compute(parallel=None):
  197. from gklearn.kernels import FixedPoint
  198. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  199. import functools
  200. dataset = chooseDataset(ds_name)
  201. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  202. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  203. try:
  204. graph_kernel = FixedPoint(
  205. node_labels=dataset.node_labels,
  206. node_attrs=dataset.node_attrs,
  207. edge_labels=dataset.edge_labels,
  208. edge_attrs=dataset.edge_attrs,
  209. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  210. weight=1e-3,
  211. p=None,
  212. q=None,
  213. edge_weight=None,
  214. node_kernels=sub_kernels,
  215. edge_kernels=sub_kernels)
  216. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  217. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  218. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  219. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  220. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  221. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  222. except Exception as exception:
  223. assert False, exception
  224. else:
  225. return gram_matrix, kernel_list, kernel
  226. assert_equality(compute, parallel=['imap_unordered', None])
  227. @pytest.mark.parametrize('ds_name', ['Acyclic'])
  228. @pytest.mark.parametrize('sub_kernel', ['exp', 'geo'])
  229. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  230. def test_SpectralDecomposition(ds_name, sub_kernel):
  231. """Test spectral decomposition kernel.
  232. """
  233. def compute(parallel=None):
  234. from gklearn.kernels import SpectralDecomposition
  235. dataset = chooseDataset(ds_name)
  236. try:
  237. graph_kernel = SpectralDecomposition(
  238. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  239. weight=1e-3,
  240. p=None,
  241. q=None,
  242. edge_weight=None,
  243. sub_kernel=sub_kernel)
  244. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  245. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  246. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  247. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  248. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  249. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  250. except Exception as exception:
  251. assert False, exception
  252. else:
  253. return gram_matrix, kernel_list, kernel
  254. assert_equality(compute, parallel=['imap_unordered', None])
  255. # @pytest.mark.parametrize(
  256. # 'compute_method,ds_name,sub_kernel',
  257. # [
  258. # ('sylvester', 'Alkane', None),
  259. # ('conjugate', 'Alkane', None),
  260. # ('conjugate', 'AIDS', None),
  261. # ('fp', 'Alkane', None),
  262. # ('fp', 'AIDS', None),
  263. # ('spectral', 'Alkane', 'exp'),
  264. # ('spectral', 'Alkane', 'geo'),
  265. # ]
  266. # )
  267. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  268. # def test_RandomWalk(ds_name, compute_method, sub_kernel, parallel):
  269. # """Test random walk kernel.
  270. # """
  271. # from gklearn.kernels import RandomWalk
  272. # from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  273. # import functools
  274. #
  275. # dataset = chooseDataset(ds_name)
  276. # mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  277. # sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  278. # # try:
  279. # graph_kernel = RandomWalk(node_labels=dataset.node_labels,
  280. # node_attrs=dataset.node_attrs,
  281. # edge_labels=dataset.edge_labels,
  282. # edge_attrs=dataset.edge_attrs,
  283. # ds_infos=dataset.get_dataset_infos(keys=['directed']),
  284. # compute_method=compute_method,
  285. # weight=1e-3,
  286. # p=None,
  287. # q=None,
  288. # edge_weight=None,
  289. # node_kernels=sub_kernels,
  290. # edge_kernels=sub_kernels,
  291. # sub_kernel=sub_kernel)
  292. # gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  293. # parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  294. # kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  295. # parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  296. # kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  297. # parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  298. # except Exception as exception:
  299. # assert False, exception
  300. @pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint'])
  301. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  302. def test_ShortestPath(ds_name):
  303. """Test shortest path kernel.
  304. """
  305. def compute(parallel=None, fcsp=None):
  306. from gklearn.kernels import ShortestPath
  307. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  308. import functools
  309. dataset = chooseDataset(ds_name)
  310. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  311. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  312. try:
  313. graph_kernel = ShortestPath(node_labels=dataset.node_labels,
  314. node_attrs=dataset.node_attrs,
  315. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  316. fcsp=fcsp,
  317. node_kernels=sub_kernels)
  318. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  319. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  320. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  321. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  322. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  323. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  324. except Exception as exception:
  325. assert False, exception
  326. else:
  327. return gram_matrix, kernel_list, kernel
  328. assert_equality(compute, parallel=['imap_unordered', None], fcsp=[True, False])
  329. #@pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint'])
  330. @pytest.mark.parametrize('ds_name', ['Alkane', 'Acyclic', 'Letter-med', 'AIDS', 'Fingerprint', 'Fingerprint_edge', 'Cuneiform'])
  331. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  332. def test_StructuralSP(ds_name):
  333. """Test structural shortest path kernel.
  334. """
  335. def compute(parallel=None, fcsp=None):
  336. from gklearn.kernels import StructuralSP
  337. from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  338. import functools
  339. dataset = chooseDataset(ds_name)
  340. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  341. sub_kernels = {'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}
  342. try:
  343. graph_kernel = StructuralSP(node_labels=dataset.node_labels,
  344. edge_labels=dataset.edge_labels,
  345. node_attrs=dataset.node_attrs,
  346. edge_attrs=dataset.edge_attrs,
  347. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  348. fcsp=fcsp,
  349. node_kernels=sub_kernels,
  350. edge_kernels=sub_kernels)
  351. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  352. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  353. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  354. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  355. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  356. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  357. except Exception as exception:
  358. assert False, exception
  359. else:
  360. return gram_matrix, kernel_list, kernel
  361. assert_equality(compute, parallel=['imap_unordered', None], fcsp=[True, False])
  362. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  363. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  364. #@pytest.mark.parametrize('k_func', ['MinMax', 'tanimoto', None])
  365. @pytest.mark.parametrize('k_func', ['MinMax', 'tanimoto'])
  366. # @pytest.mark.parametrize('compute_method', ['trie', 'naive'])
  367. def test_PathUpToH(ds_name, k_func):
  368. """Test path kernel up to length $h$.
  369. """
  370. def compute(parallel=None, compute_method=None):
  371. from gklearn.kernels import PathUpToH
  372. dataset = chooseDataset(ds_name)
  373. try:
  374. graph_kernel = PathUpToH(node_labels=dataset.node_labels,
  375. edge_labels=dataset.edge_labels,
  376. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  377. depth=2, k_func=k_func, compute_method=compute_method)
  378. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  379. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  380. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  381. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  382. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  383. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  384. except Exception as exception:
  385. assert False, exception
  386. else:
  387. return gram_matrix, kernel_list, kernel
  388. assert_equality(compute, parallel=['imap_unordered', None],
  389. compute_method=['trie', 'naive'])
  390. @pytest.mark.parametrize('ds_name', ['Alkane', 'AIDS'])
  391. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  392. def test_Treelet(ds_name):
  393. """Test treelet kernel.
  394. """
  395. def compute(parallel=None):
  396. from gklearn.kernels import Treelet
  397. from gklearn.utils.kernels import polynomialkernel
  398. import functools
  399. dataset = chooseDataset(ds_name)
  400. pkernel = functools.partial(polynomialkernel, d=2, c=1e5)
  401. try:
  402. graph_kernel = Treelet(node_labels=dataset.node_labels,
  403. edge_labels=dataset.edge_labels,
  404. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  405. sub_kernel=pkernel)
  406. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  407. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  408. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  409. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  410. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  411. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  412. except Exception as exception:
  413. assert False, exception
  414. else:
  415. return gram_matrix, kernel_list, kernel
  416. assert_equality(compute, parallel=['imap_unordered', None])
  417. @pytest.mark.parametrize('ds_name', ['Acyclic'])
  418. #@pytest.mark.parametrize('base_kernel', ['subtree', 'sp', 'edge'])
  419. # @pytest.mark.parametrize('base_kernel', ['subtree'])
  420. # @pytest.mark.parametrize('parallel', ['imap_unordered', None])
  421. def test_WLSubtree(ds_name):
  422. """Test Weisfeiler-Lehman subtree kernel.
  423. """
  424. def compute(parallel=None):
  425. from gklearn.kernels import WLSubtree
  426. dataset = chooseDataset(ds_name)
  427. try:
  428. graph_kernel = WLSubtree(node_labels=dataset.node_labels,
  429. edge_labels=dataset.edge_labels,
  430. ds_infos=dataset.get_dataset_infos(keys=['directed']),
  431. height=2)
  432. gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
  433. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  434. kernel_list, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1:],
  435. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  436. kernel, run_time = graph_kernel.compute(dataset.graphs[0], dataset.graphs[1],
  437. parallel=parallel, n_jobs=multiprocessing.cpu_count(), verbose=True)
  438. except Exception as exception:
  439. assert False, exception
  440. else:
  441. return gram_matrix, kernel_list, kernel
  442. assert_equality(compute, parallel=['imap_unordered', None])
  443. if __name__ == "__main__":
  444. test_list_graph_kernels()
  445. # test_spkernel('Alkane', 'imap_unordered')
  446. # test_ShortestPath('Alkane')
  447. # test_StructuralSP('Fingerprint_edge', 'imap_unordered')
  448. # test_StructuralSP('Alkane', None)
  449. # test_StructuralSP('Cuneiform', None)
  450. # test_WLSubtree('Acyclic', 'imap_unordered')
  451. # test_RandomWalk('Acyclic', 'sylvester', None, 'imap_unordered')
  452. # test_RandomWalk('Acyclic', 'conjugate', None, 'imap_unordered')
  453. # test_RandomWalk('Acyclic', 'fp', None, None)
  454. # test_RandomWalk('Acyclic', 'spectral', 'exp', 'imap_unordered')
  455. # test_CommonWalk('Alkane', 0.01, 'geo')
  456. # test_ShortestPath('Acyclic')

A Python package for graph kernels, graph edit distances and graph pre-image problem.