
run_vertex_differs_ssp.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 8 16:23:39 2019

@author: ljia
"""
import sys
import numpy as np
import networkx as nx
sys.path.insert(0, "../../")
from gklearn.utils.graphfiles import loadDataset
from gklearn.utils.model_selection_precomputed import compute_gram_matrices
from sklearn.model_selection import ParameterGrid
from libs import *
import functools
import multiprocessing
from gklearn.utils.kernels import deltakernel, gaussiankernel, kernelproduct

dslist = [
    # {'name': 'Acyclic', 'dataset': '../datasets/acyclic/dataset_bps.ds',
    #  'task': 'regression'},  # node symb
    # {'name': 'Alkane', 'dataset': '../datasets/Alkane/dataset.ds', 'task': 'regression',
    #  'dataset_y': '../datasets/Alkane/dataset_boiling_point_names.txt', },
    # # contains single node graph, node symb
    # {'name': 'MAO', 'dataset': '../datasets/MAO/dataset.ds', },  # node/edge symb
    # {'name': 'PAH', 'dataset': '../datasets/PAH/dataset.ds', },  # unlabeled
    # {'name': 'MUTAG', 'dataset': '../datasets/MUTAG/MUTAG.mat',
    #  'extra_params': {'am_sp_al_nl_el': [0, 0, 3, 1, 2]}},  # node/edge symb
    # {'name': 'Letter-med', 'dataset': '../datasets/Letter-med/Letter-med_A.txt'},
    # # node nsymb
    {'name': 'ENZYMES', 'dataset': '../../datasets/ENZYMES_txt/ENZYMES_A_sparse.txt'},
    # node symb/nsymb
]


def run_ms(dataset, y, ds):
    # Compute Gram matrices of the structural shortest path kernel over the
    # precomputed-parameter grid and report the average computation time.
    from gklearn.kernels.structuralspKernel import structuralspkernel
    estimator = structuralspkernel
    mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
    param_grid_precomputed = {'node_kernels':
                              [{'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}],
                              'edge_kernels':
                              [{'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}]}
    # param_grid for the downstream learner is defined but not used in this timing script.
    param_grid = [{'C': np.logspace(-10, 10, num=41, base=10)},
                  {'alpha': np.logspace(-10, 10, num=41, base=10)}]

    _, gram_matrix_time, _, _, _ = compute_gram_matrices(
        dataset, y, estimator, list(ParameterGrid(param_grid_precomputed)),
        '../../notebooks/results/' + estimator.__name__, ds['name'],
        n_jobs=multiprocessing.cpu_count(), verbose=False)
    average_gram_matrix_time = np.mean(gram_matrix_time)
    std_gram_matrix_time = np.std(gram_matrix_time, ddof=1)
    print('\n***** time to calculate gram matrix with different hyper-params: {:.2f}±{:.2f}s'
          .format(average_gram_matrix_time, std_gram_matrix_time))
    print()
    return average_gram_matrix_time, std_gram_matrix_time


for ds in dslist:
    print()
    print(ds['name'])
    Gn, y_all = loadDataset(
        ds['dataset'], filename_y=(ds['dataset_y'] if 'dataset_y' in ds else None),
        extra_params=(ds['extra_params'] if 'extra_params' in ds else None))

    # Sort the graphs by vertex count and split the sorted list into five pieces,
    # so running time can be measured on graphs of a given size range.
    vn_list = [nx.number_of_nodes(g) for g in Gn]
    idx_sorted = np.argsort(vn_list)
    vn_list.sort()
    Gn = [Gn[idx] for idx in idx_sorted]
    y_all = [y_all[idx] for idx in idx_sorted]
    len_1piece = int(len(Gn) / 5)
    ave_time = []
    std_time = []
    for piece in range(4, 5):  # only the last piece (the largest graphs) is timed here
        print('piece', str(piece), ':')
        Gn_p = Gn[len_1piece * piece:len_1piece * (piece + 1)]
        y_all_p = y_all[len_1piece * piece:len_1piece * (piece + 1)]
        avet, stdt = run_ms(Gn_p, y_all_p, ds)
        ave_time.append(avet)
        std_time.append(stdt)

    print('\n****** for dataset', ds['name'], ', the average time is \n', ave_time,
          '\nthe time std is \n', std_time)
    print()
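
For illustration only, the short sketch below reproduces the sorting-and-slicing step from the loop above in isolation, using synthetic random graphs in place of a loaded dataset; the graphs, targets, and split index are placeholders and not part of the original script.

import networkx as nx
import numpy as np

# Placeholder graphs and targets; the real script loads these with loadDataset.
Gn = [nx.gnp_random_graph(n, 0.3, seed=n) for n in range(5, 55)]
y_all = list(range(len(Gn)))

# Sort graphs by vertex count, then keep the last fifth (the largest graphs),
# mirroring the piece = 4 case of the loop above.
vn_list = [nx.number_of_nodes(g) for g in Gn]
idx_sorted = np.argsort(vn_list)
Gn = [Gn[i] for i in idx_sorted]
y_all = [y_all[i] for i in idx_sorted]

len_1piece = len(Gn) // 5
piece = 4
Gn_p = Gn[len_1piece * piece:len_1piece * (piece + 1)]
y_all_p = y_all[len_1piece * piece:len_1piece * (piece + 1)]
print(len(Gn_p), 'graphs, with',
      min(nx.number_of_nodes(g) for g in Gn_p), 'to',
      max(nx.number_of_nodes(g) for g in Gn_p), 'nodes')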

A Python package for graph kernels, graph edit distances, and the graph pre-image problem.