You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

run_randomwalkkernel.py 6.0 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110
  1. #!/usr/bin/env python3
  2. # -*- coding: utf-8 -*-
  3. """
  4. Created on Thu Nov 22 17:02:28 2018
  5. @author: ljia
  6. """
  7. import functools
  8. from libs import *
  9. import multiprocessing
  10. from pygraph.kernels.randomWalkKernel import randomwalkkernel
  11. from pygraph.utils.kernels import deltakernel, gaussiankernel, kernelproduct
  12. import numpy as np
  13. dslist = [
  14. {'name': 'Acyclic', 'dataset': '../datasets/acyclic/dataset_bps.ds',
  15. 'task': 'regression'}, # node symb
  16. {'name': 'Alkane', 'dataset': '../datasets/Alkane/dataset.ds', 'task': 'regression',
  17. 'dataset_y': '../datasets/Alkane/dataset_boiling_point_names.txt', },
  18. # contains single node graph, node symb
  19. {'name': 'MAO', 'dataset': '../datasets/MAO/dataset.ds', }, # node/edge symb
  20. {'name': 'PAH', 'dataset': '../datasets/PAH/dataset.ds', }, # unlabeled
  21. {'name': 'MUTAG', 'dataset': '../datasets/MUTAG/MUTAG.mat',
  22. 'extra_params': {'am_sp_al_nl_el': [0, 0, 3, 1, 2]}}, # node/edge symb
  23. {'name': 'Letter-med', 'dataset': '../datasets/Letter-med/Letter-med_A.txt'},
  24. # node nsymb
  25. {'name': 'ENZYMES', 'dataset': '../datasets/ENZYMES_txt/ENZYMES_A_sparse.txt'},
  26. # node symb/nsymb
  27. # {'name': 'Mutagenicity', 'dataset': '../datasets/Mutagenicity/Mutagenicity_A.txt'},
  28. # # node/edge symb
  29. # {'name': 'D&D', 'dataset': '../datasets/D&D/DD.mat',
  30. # 'extra_params': {'am_sp_al_nl_el': [0, 1, 2, 1, -1]}}, # node symb
  31. # {'name': 'COIL-DEL', 'dataset': '../datasets/COIL-DEL/COIL-DEL_A.txt'}, # edge symb, node nsymb
  32. # # # {'name': 'BZR', 'dataset': '../datasets/BZR_txt/BZR_A_sparse.txt'}, # node symb/nsymb
  33. # # # {'name': 'COX2', 'dataset': '../datasets/COX2_txt/COX2_A_sparse.txt'}, # node symb/nsymb
  34. # {'name': 'Fingerprint', 'dataset': '../datasets/Fingerprint/Fingerprint_A.txt'},
  35. #
  36. # # {'name': 'DHFR', 'dataset': '../datasets/DHFR_txt/DHFR_A_sparse.txt'}, # node symb/nsymb
  37. # # {'name': 'SYNTHETIC', 'dataset': '../datasets/SYNTHETIC_txt/SYNTHETIC_A_sparse.txt'}, # node symb/nsymb
  38. # {'name': 'MSRC9', 'dataset': '../datasets/MSRC_9_txt/MSRC_9_A.txt'}, # node symb, missing values
  39. # {'name': 'MSRC21', 'dataset': '../datasets/MSRC_21_txt/MSRC_21_A.txt'}, # node symb, missing values
  40. # # {'name': 'FIRSTMM_DB', 'dataset': '../datasets/FIRSTMM_DB/FIRSTMM_DB_A.txt'}, # node symb/nsymb ,edge nsymb
  41. # # {'name': 'PROTEINS', 'dataset': '../datasets/PROTEINS_txt/PROTEINS_A_sparse.txt'}, # node symb/nsymb
  42. # # {'name': 'PROTEINS_full', 'dataset': '../datasets/PROTEINS_full_txt/PROTEINS_full_A_sparse.txt'}, # node symb/nsymb
  43. # # {'name': 'AIDS', 'dataset': '../datasets/AIDS/AIDS_A.txt'}, # node symb/nsymb, edge symb
  44. # {'name': 'NCI1', 'dataset': '../datasets/NCI1/NCI1.mat',
  45. # 'extra_params': {'am_sp_al_nl_el': [1, 1, 2, 0, -1]}}, # node symb
  46. # {'name': 'NCI109', 'dataset': '../datasets/NCI109/NCI109.mat',
  47. # 'extra_params': {'am_sp_al_nl_el': [1, 1, 2, 0, -1]}}, # node symb
  48. # {'name': 'NCI-HIV', 'dataset': '../datasets/NCI-HIV/AIDO99SD.sdf',
  49. # 'dataset_y': '../datasets/NCI-HIV/aids_conc_may04.txt',}, # node/edge symb
  50. # # not working below
  51. # {'name': 'PTC_FM', 'dataset': '../datasets/PTC/Train/FM.ds',},
  52. # {'name': 'PTC_FR', 'dataset': '../datasets/PTC/Train/FR.ds',},
  53. # {'name': 'PTC_MM', 'dataset': '../datasets/PTC/Train/MM.ds',},
  54. # {'name': 'PTC_MR', 'dataset': '../datasets/PTC/Train/MR.ds',},
  55. ]
# Graph kernel under evaluation: the random-walk kernel function imported
# from pygraph.kernels.randomWalkKernel; its Gram matrix is precomputed
# by model_selection_for_precomputed_kernel below.
estimator = randomwalkkernel
  57. param_grid = [{'C': np.logspace(-10, 10, num=41, base=10)},
  58. {'alpha': np.logspace(-10, 10, num=41, base=10)}]
  59. for ds in dslist:
  60. print()
  61. print(ds['name'])
  62. for compute_method in ['sylvester', 'conjugate', 'fp', 'spectral']:
  63. if compute_method == 'sylvester':
  64. param_grid_precomputed = {'compute_method': ['sylvester'],
  65. # 'weight': np.linspace(0.01, 0.10, 10)}
  66. 'weight': np.logspace(-1, -10, num=10, base=10)}
  67. elif compute_method == 'conjugate':
  68. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  69. param_grid_precomputed = {'compute_method': ['conjugate'],
  70. 'node_kernels':
  71. [{'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}],
  72. 'edge_kernels':
  73. [{'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}],
  74. 'weight': np.logspace(-1, -10, num=10, base=10)}
  75. elif compute_method == 'fp':
  76. mixkernel = functools.partial(kernelproduct, deltakernel, gaussiankernel)
  77. param_grid_precomputed = {'compute_method': ['fp'],
  78. 'node_kernels':
  79. [{'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}],
  80. 'edge_kernels':
  81. [{'symb': deltakernel, 'nsymb': gaussiankernel, 'mix': mixkernel}],
  82. 'weight': np.logspace(-3, -10, num=8, base=10)}
  83. elif compute_method == 'spectral':
  84. param_grid_precomputed = {'compute_method': ['spectral'],
  85. 'weight': np.logspace(-1, -10, num=10, base=10),
  86. 'sub_kernel': ['geo', 'exp']}
  87. model_selection_for_precomputed_kernel(
  88. ds['dataset'],
  89. estimator,
  90. param_grid_precomputed,
  91. (param_grid[1] if ('task' in ds and ds['task']
  92. == 'regression') else param_grid[0]),
  93. (ds['task'] if 'task' in ds else 'classification'),
  94. NUM_TRIALS=30,
  95. datafile_y=(ds['dataset_y'] if 'dataset_y' in ds else None),
  96. extra_params=(ds['extra_params'] if 'extra_params' in ds else None),
  97. ds_name=ds['name'],
  98. n_jobs=multiprocessing.cpu_count(),
  99. read_gm_from_file=False)
  100. print()

A Python package for graph kernels, graph edit distances and the graph pre-image problem.