diff --git a/README.md b/README.md
index 386d9a4..085084d 100644
--- a/README.md
+++ b/README.md
@@ -12,12 +12,12 @@ A Python package for graph kernels, graph edit distances and graph pre-image pro
 * python>=3.5
 * numpy>=1.16.2
 * scipy>=1.1.0
-* matplotlib>=3.0.0
+* matplotlib>=3.1.0
 * networkx>=2.2
 * scikit-learn>=0.20.0
 * tabulate>=0.8.2
 * tqdm>=4.26.0
-* control==0.8.0 (for generalized random walk kernels only)
+* control>=0.8.2 (for generalized random walk kernels only)
 * slycot==0.3.3 (for generalized random walk kernels only, which requires a fortran compiler, gfortran for example)
 
 ## How to use?
diff --git a/gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_el.py b/gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_el.py
new file mode 100644
index 0000000..2341ba9
--- /dev/null
+++ b/gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_el.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Sep 21 10:34:26 2020
+
+@author: ljia
+"""
+from utils import Graph_Kernel_List_ESym, compute_graph_kernel
+
+
+def generate_graphs(num_el_alp):
+	from gklearn.utils.graph_synthesizer import GraphSynthesizer
+	gsyzer = GraphSynthesizer()
+	graphs = gsyzer.unified_graphs(num_graphs=100, num_nodes=20, num_edges=40, num_node_labels=0, num_edge_labels=num_el_alp, seed=None, directed=False)
+	return graphs
+
+
+def xp_synthesied_graphs_num_edge_label_alphabet():
+
+	# Run and save.
+	import pickle
+	import os
+	save_dir = 'outputs/synthesized_graphs_num_edge_label_alphabet/'
+	if not os.path.exists(save_dir):
+		os.makedirs(save_dir)
+
+	run_times = {}
+
+	for kernel_name in Graph_Kernel_List_ESym:
+		print()
+		print('Kernel:', kernel_name)
+
+		run_times[kernel_name] = []
+		for num_el_alp in [0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40]:
+			print()
+			print('Number of edge label alphabet:', num_el_alp)
+
+			# Generate graphs.
+			graphs = generate_graphs(num_el_alp)
+
+			# Compute Gram matrix.
+			gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
+			run_times[kernel_name].append(run_time)
+
+			pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_el_alp) + '.pkl', 'wb'))
+
+	# Save all.
+	pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
+
+	return
+
+
+if __name__ == '__main__':
+	xp_synthesied_graphs_num_edge_label_alphabet()
diff --git a/gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py b/gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py
index fbe0fa3..005ab35 100644
--- a/gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py
+++ b/gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py
@@ -31,7 +31,7 @@ def xp_synthesied_graphs_num_node_label_alphabet():
 		print('Kernel:', kernel_name)
 
 		run_times[kernel_name] = []
-		for num_nl_alp in [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]: # [0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40]:
+		for num_nl_alp in [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]:
 			print()
 			print('Number of node label alphabet:', num_nl_alp)
 
diff --git a/gklearn/utils/graph_synthesizer.py b/gklearn/utils/graph_synthesizer.py
index 581ebc0..2c5f650 100644
--- a/gklearn/utils/graph_synthesizer.py
+++ b/gklearn/utils/graph_synthesizer.py
@@ -29,9 +29,9 @@ class GraphSynthesizer(object):
 		if num_edge_labels > 0:
 			edge_labels = np.random.randint(0, high=num_edge_labels, size=num_edges)
 
-			for i in random.sample(range(0, max_num_edges), num_edges):
+			for idx, i in enumerate(random.sample(range(0, max_num_edges), num_edges)):
 				node1, node2 = all_edges[i]
-				g.add_edge(str(node1), str(node2), bond_type=edge_labels[i]) # @todo: update "bond_type".
+				g.add_edge(str(node1), str(node2), bond_type=edge_labels[idx]) # @todo: update "bond_type".
 		else:
 			for i in random.sample(range(0, max_num_edges), num_edges):
 				node1, node2 = all_edges[i]