Browse Source

Update exps.

v0.2.x
jajupmochi 4 years ago
parent
commit
01f0b4a489
7 changed files with 133 additions and 18 deletions
  1. +64
    -0
      gklearn/experiments/papers/PRL_2020/runtimes_28cores.py
  2. +13
    -3
      gklearn/experiments/papers/PRL_2020/synthesized_graphs_N.py
  3. +12
    -3
      gklearn/experiments/papers/PRL_2020/synthesized_graphs_degrees.py
  4. +12
    -3
      gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_el.py
  5. +12
    -3
      gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py
  6. +12
    -3
      gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nodes.py
  7. +8
    -3
      gklearn/experiments/papers/PRL_2020/utils.py

+ 64
- 0
gklearn/experiments/papers/PRL_2020/runtimes_28cores.py View File

@@ -0,0 +1,64 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 21 10:34:26 2020

@author: ljia
"""
from utils import Graph_Kernel_List, Dataset_List, compute_graph_kernel
from gklearn.utils.graphdataset import load_predefined_dataset
import logging


# def get_graphs(ds_name):
# from gklearn.utils.graph_synthesizer import GraphSynthesizer
# gsyzer = GraphSynthesizer()
# graphs = gsyzer.unified_graphs(num_graphs=100, num_nodes=num_nodes, num_edges=int(num_nodes*2), num_node_labels=0, num_edge_labels=0, seed=None, directed=False)
# return graphs


def xp_runtimes_of_all_7cores():
	"""Benchmark every kernel in ``Graph_Kernel_List`` on every predefined dataset.

	For each (kernel, dataset) pair, computes the Gram matrix with
	``compute_graph_kernel`` and records the elapsed time.  Each individual
	runtime is pickled to ``save_dir`` as it is produced (so partial results
	survive a crash), and the full ``run_times`` dict is pickled at the end.

	Returns
	-------
	None
		Results are persisted to disk only.

	NOTE(review): the function/save-dir names say "7cores" but ``n_jobs=28``
	and the file is named ``runtimes_28cores.py`` — confirm which core count
	is intended before comparing results across experiments.
	"""
	# Run and save.
	import pickle
	import os
	save_dir = 'outputs/runtimes_of_all_7cores/'
	# exist_ok avoids the check-then-create race of os.path.exists + makedirs.
	os.makedirs(save_dir, exist_ok=True)

	run_times = {}
	for kernel_name in Graph_Kernel_List:
		print()
		print('Kernel:', kernel_name)

		run_times[kernel_name] = []
		for ds_name in Dataset_List:
			print()
			print('Dataset:', ds_name)

			# Get graphs.
			graphs, _ = load_predefined_dataset(ds_name)

			# Compute Gram matrix.  Pre-set the fallback so the per-pair dump
			# below never sees an undefined or stale `run_time` when the
			# computation raises (the original dumped `run_time` unguarded,
			# which was a NameError on the first failure and a stale value
			# from an earlier pair afterwards).
			run_time = 'error'
			try:
				gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=28)
			except Exception as exp:
				print('An exception occured when running this experiment:')
				LOG_FILENAME = save_dir + 'error.txt'
				logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
				logging.exception('')
				print(repr(exp))
			run_times[kernel_name].append(run_time)

			# Persist this pair's runtime immediately so a later crash does
			# not lose earlier measurements.
			with open(save_dir + 'run_time.' + kernel_name + '.' + ds_name + '.pkl', 'wb') as f:
				pickle.dump(run_time, f)

	# Save all.
	with open(save_dir + 'run_times.pkl', 'wb') as f:
		pickle.dump(run_times, f)
	return


# Script entry point: run the full runtime benchmark when executed directly.
if __name__ == '__main__':
	xp_runtimes_of_all_7cores()

+ 13
- 3
gklearn/experiments/papers/PRL_2020/synthesized_graphs_N.py View File

@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
@author: ljia @author: ljia
""" """
from utils import Graph_Kernel_List, compute_graph_kernel from utils import Graph_Kernel_List, compute_graph_kernel
import logging




def generate_graphs(): def generate_graphs():
@@ -39,10 +40,19 @@ def xp_synthesied_graphs_dataset_size():
print('Number of graphs:', num_graphs) print('Number of graphs:', num_graphs)
sub_graphs = [g.copy() for g in graphs[0:num_graphs]] sub_graphs = [g.copy() for g in graphs[0:num_graphs]]
gram_matrix, run_time = compute_graph_kernel(sub_graphs, kernel_name)
run_times[kernel_name].append(run_time)
pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_graphs) + '.pkl', 'wb'))
try:
gram_matrix, run_time = compute_graph_kernel(sub_graphs, kernel_name, n_jobs=1)
run_times[kernel_name].append(run_time)
except Exception as exp:
run_times[kernel_name].append('error')
print('An exception occured when running this experiment:')
LOG_FILENAME = save_dir + 'error.txt'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
logging.exception('')
print(repr(exp))
pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_graphs) + '.pkl', 'wb'))
# Save all. # Save all.
pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb')) pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))


+ 12
- 3
gklearn/experiments/papers/PRL_2020/synthesized_graphs_degrees.py View File

@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
@author: ljia @author: ljia
""" """
from utils import Graph_Kernel_List, compute_graph_kernel from utils import Graph_Kernel_List, compute_graph_kernel
import logging




def generate_graphs(degree): def generate_graphs(degree):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_degrees():
graphs = generate_graphs(degree) graphs = generate_graphs(degree)


# Compute Gram matrix. # Compute Gram matrix.
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
run_times[kernel_name].append(run_time)
try:
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
run_times[kernel_name].append(run_time)
except Exception as exp:
run_times[kernel_name].append('error')
print('An exception occured when running this experiment:')
LOG_FILENAME = save_dir + 'error.txt'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
logging.exception('')
print(repr(exp))
pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(degree) + '.pkl', 'wb'))
pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(degree) + '.pkl', 'wb'))
# Save all. # Save all.
pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb')) pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))


+ 12
- 3
gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_el.py View File

@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
@author: ljia @author: ljia
""" """
from utils import Graph_Kernel_List_ESym, compute_graph_kernel from utils import Graph_Kernel_List_ESym, compute_graph_kernel
import logging




def generate_graphs(num_el_alp): def generate_graphs(num_el_alp):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_edge_label_alphabet():
graphs = generate_graphs(num_el_alp) graphs = generate_graphs(num_el_alp)


# Compute Gram matrix. # Compute Gram matrix.
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
run_times[kernel_name].append(run_time)
try:
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
run_times[kernel_name].append(run_time)
except Exception as exp:
run_times[kernel_name].append('error')
print('An exception occured when running this experiment:')
LOG_FILENAME = save_dir + 'error.txt'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
logging.exception('')
print(repr(exp))
pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_el_alp) + '.pkl', 'wb'))
pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_el_alp) + '.pkl', 'wb'))
# Save all. # Save all.
pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb')) pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))


+ 12
- 3
gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py View File

@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
@author: ljia @author: ljia
""" """
from utils import Graph_Kernel_List_VSym, compute_graph_kernel from utils import Graph_Kernel_List_VSym, compute_graph_kernel
import logging




def generate_graphs(num_nl_alp): def generate_graphs(num_nl_alp):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_node_label_alphabet():
graphs = generate_graphs(num_nl_alp) graphs = generate_graphs(num_nl_alp)


# Compute Gram matrix. # Compute Gram matrix.
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
run_times[kernel_name].append(run_time)
try:
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
run_times[kernel_name].append(run_time)
except Exception as exp:
run_times[kernel_name].append('error')
print('An exception occured when running this experiment:')
LOG_FILENAME = save_dir + 'error.txt'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
logging.exception('')
print(repr(exp))
pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nl_alp) + '.pkl', 'wb'))
pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nl_alp) + '.pkl', 'wb'))
# Save all. # Save all.
pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb')) pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))


+ 12
- 3
gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nodes.py View File

@@ -6,6 +6,7 @@ Created on Mon Sep 21 10:34:26 2020
@author: ljia @author: ljia
""" """
from utils import Graph_Kernel_List, compute_graph_kernel from utils import Graph_Kernel_List, compute_graph_kernel
import logging




def generate_graphs(num_nodes): def generate_graphs(num_nodes):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_nodes():
graphs = generate_graphs(num_nodes) graphs = generate_graphs(num_nodes)


# Compute Gram matrix. # Compute Gram matrix.
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
run_times[kernel_name].append(run_time)
try:
gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
run_times[kernel_name].append(run_time)
except Exception as exp:
run_times[kernel_name].append('error')
print('An exception occured when running this experiment:')
LOG_FILENAME = save_dir + 'error.txt'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
logging.exception('')
print(repr(exp))
pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nodes) + '.pkl', 'wb'))
pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nodes) + '.pkl', 'wb'))
# Save all. # Save all.
pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb')) pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))


+ 8
- 3
gklearn/experiments/papers/PRL_2020/utils.py View File

@@ -5,6 +5,9 @@ Created on Tue Sep 22 11:33:28 2020


@author: ljia @author: ljia
""" """
import multiprocessing


Graph_Kernel_List = ['PathUpToH', 'WLSubtree', 'SylvesterEquation', 'Marginalized', 'ShortestPath', 'Treelet', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'StructuralSP', 'CommonWalk'] Graph_Kernel_List = ['PathUpToH', 'WLSubtree', 'SylvesterEquation', 'Marginalized', 'ShortestPath', 'Treelet', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'StructuralSP', 'CommonWalk']
# Graph_Kernel_List = ['CommonWalk', 'Marginalized', 'SylvesterEquation', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'ShortestPath', 'StructuralSP', 'PathUpToH', 'Treelet', 'WLSubtree'] # Graph_Kernel_List = ['CommonWalk', 'Marginalized', 'SylvesterEquation', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'ShortestPath', 'StructuralSP', 'PathUpToH', 'Treelet', 'WLSubtree']


@@ -21,8 +24,10 @@ Graph_Kernel_List_VCon = ['ShortestPath', 'ConjugateGradient', 'FixedPoint', 'St
Graph_Kernel_List_ECon = ['ConjugateGradient', 'FixedPoint', 'StructuralSP'] Graph_Kernel_List_ECon = ['ConjugateGradient', 'FixedPoint', 'StructuralSP']




def compute_graph_kernel(graphs, kernel_name):
import multiprocessing
Dataset_List = ['Alkane', 'Acyclic', 'MAO', 'PAH', 'MUTAG', 'Letter-med', 'ENZYMES', 'AIDS', 'NCI1', 'NCI109', 'DD']


def compute_graph_kernel(graphs, kernel_name, n_jobs=multiprocessing.cpu_count()):
if kernel_name == 'CommonWalk': if kernel_name == 'CommonWalk':
from gklearn.kernels.commonWalkKernel import commonwalkkernel from gklearn.kernels.commonWalkKernel import commonwalkkernel
@@ -99,7 +104,7 @@ def compute_graph_kernel(graphs, kernel_name):
params = {'base_kernel': 'subtree', 'height': 5} params = {'base_kernel': 'subtree', 'height': 5}
# params['parallel'] = None # params['parallel'] = None
params['n_jobs'] = multiprocessing.cpu_count()
params['n_jobs'] = n_jobs
params['verbose'] = True params['verbose'] = True
results = estimator(graphs, **params) results = estimator(graphs, **params)

Loading…
Cancel
Save