
Fix bugs.

v0.2.x
jajupmochi, 4 years ago
parent commit 87e9da8b65
7 changed files with 96 additions and 8 deletions
  1. +21 -0  Problems.md
  2. +54 -0  gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py
  3. +14 -1  gklearn/experiments/papers/PRL_2020/utils.py
  4. +1 -1   gklearn/kernels/untilHPathKernel.py
  5. +4 -4   gklearn/utils/graph_synthesizer.py
  6. +1 -1   requirements.txt
  7. +1 -1   requirements_pypi.txt

Problems.md (+21 -0)

@@ -0,0 +1,21 @@
# About graph kernels.

## (Random walk) Sylvester equation kernel.

### ImportError: cannot import name 'frange' from 'matplotlib.mlab'

This happens when an outdated `control` package is used with a recent `matplotlib`: `mlab.frange` was removed in `matplotlib-3.1.0`, and `control` removed the call to it in `control-0.8.2`.

Update your `control` package.
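
If it is unclear which versions are installed, a quick check helps (a hypothetical snippet, not part of this commit; assumes Python >= 3.8 so that `importlib.metadata` is in the standard library):

```python
# Hypothetical version check, assuming Python >= 3.8.
from importlib.metadata import version

print('matplotlib:', version('matplotlib'))  # mlab.frange is gone from 3.1.0 onwards
print('control:', version('control'))        # the frange call was dropped in 0.8.2
```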

### Intel MKL FATAL ERROR: Cannot load libmkl_avx2.so or libmkl_def.so.

The Intel Math Kernel Library (MKL) is missing or not set up properly. MKL appears to be required by the `control` module.

Install MKL, then add the following to your environment:

```
export PATH=/opt/intel/bin:$PATH

export LD_LIBRARY_PATH=/opt/intel/lib/intel64:/opt/intel/mkl/lib/intel64:$LD_LIBRARY_PATH
```
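
As a rough sanity check (not part of this commit), one can also inspect which BLAS/LAPACK build NumPy was linked against; if MKL does not show up there, the paths above are likely not being picked up:

```python
# Hypothetical diagnostic: print NumPy's build/link configuration.
# MKL entries should appear here when MKL is installed and visible.
import numpy as np

np.__config__.show()
```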

gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py (+54 -0)

@@ -0,0 +1,54 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 21 10:34:26 2020

@author: ljia
"""
from utils import Graph_Kernel_List_VSym, compute_graph_kernel


def generate_graphs(num_nl_alp):
    from gklearn.utils.graph_synthesizer import GraphSynthesizer
    gsyzer = GraphSynthesizer()
    graphs = gsyzer.unified_graphs(num_graphs=100, num_nodes=20, num_edges=40, num_node_labels=num_nl_alp, num_edge_labels=0, seed=None, directed=False)
    return graphs


def xp_synthesied_graphs_num_node_label_alphabet():
    # Run and save.
    import pickle
    import os
    save_dir = 'outputs/synthesized_graphs_num_node_label_alphabet/'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    run_times = {}
    for kernel_name in Graph_Kernel_List_VSym:
        print()
        print('Kernel:', kernel_name)
        run_times[kernel_name] = []
        for num_nl_alp in [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]:  # [0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40]:
            print()
            print('Number of node label alphabet:', num_nl_alp)
            # Generate graphs.
            graphs = generate_graphs(num_nl_alp)

            # Compute Gram matrix.
            gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
            run_times[kernel_name].append(run_time)
            pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nl_alp) + '.pkl', 'wb'))
    # Save all.
    pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
    return


if __name__ == '__main__':
    xp_synthesied_graphs_num_node_label_alphabet()

gklearn/experiments/papers/PRL_2020/utils.py (+14 -1)

@@ -5,10 +5,22 @@ Created on Tue Sep 22 11:33:28 2020

@author: ljia
"""
Graph_Kernel_List = ['PathUpToH', 'WLSubtree', 'SylvesterEquation', 'Marginalized', 'ShortestPath', 'Treelet', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'StructuralSP', 'CommonWalk']
# Graph_Kernel_List = ['CommonWalk', 'Marginalized', 'SylvesterEquation', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'ShortestPath', 'StructuralSP', 'PathUpToH', 'Treelet', 'WLSubtree']

+Graph_Kernel_List_VSym = ['PathUpToH', 'WLSubtree', 'Marginalized', 'ShortestPath', 'Treelet', 'ConjugateGradient', 'FixedPoint', 'StructuralSP', 'CommonWalk']
+
+Graph_Kernel_List_ESym = ['PathUpToH', 'Marginalized', 'Treelet', 'ConjugateGradient', 'FixedPoint', 'StructuralSP', 'CommonWalk']
+
+Graph_Kernel_List_VCon = ['ShortestPath', 'ConjugateGradient', 'FixedPoint', 'StructuralSP']
+
+Graph_Kernel_List_ECon = ['ConjugateGradient', 'FixedPoint', 'StructuralSP']


def compute_graph_kernel(graphs, kernel_name):
    import multiprocessing
@@ -86,6 +98,7 @@ def compute_graph_kernel(graphs, kernel_name):
        estimator = weisfeilerlehmankernel
        params = {'base_kernel': 'subtree', 'height': 5}
+    # params['parallel'] = None
    params['n_jobs'] = multiprocessing.cpu_count()
    params['verbose'] = True
    results = estimator(graphs, **params)
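
For context, a hedged sketch (not part of the commit) of how the new lists combine with `compute_graph_kernel`, mirroring the loop in `synthesized_graphs_num_nl.py` above; the synthesizer parameters are illustrative, and the snippet must be run from the `PRL_2020` directory so that `from utils import ...` resolves:

```python
# Illustrative only: iterate over the new Graph_Kernel_List_ESym list the same
# way the experiment above iterates over Graph_Kernel_List_VSym.
from utils import Graph_Kernel_List_ESym, compute_graph_kernel
from gklearn.utils.graph_synthesizer import GraphSynthesizer

graphs = GraphSynthesizer().unified_graphs(
    num_graphs=10, num_nodes=20, num_edges=40,
    num_node_labels=4, num_edge_labels=3, seed=None, directed=False)

for kernel_name in Graph_Kernel_List_ESym:
    gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
    print(kernel_name, run_time)
```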


gklearn/kernels/untilHPathKernel.py (+1 -1)

@@ -649,7 +649,7 @@ def paths2labelseqs(plist, G, ds_attrs, node_label, edge_label):
                # path_strs.append(tuple(strlist))
            else:
                path_strs = [
-                   tuple([G.node[node][node_label] for node in path])
+                   tuple([G.nodes[node][node_label] for node in path])
                    for path in plist
                ]
            return path_strs
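
Background for this one-liner (a sketch against plain networkx, not code from this repo): recent networkx releases removed the old `G.node` alias, so node attributes have to be read through `G.nodes`:

```python
# Minimal illustration of the attribute access this change switches to.
import networkx as nx

G = nx.Graph()
G.add_node('0', atom=1)
print(G.nodes['0']['atom'])  # supported API
# G.node['0']['atom']        # raises AttributeError on recent networkx versions
```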


gklearn/utils/graph_synthesizer.py (+4 -4)

@@ -20,9 +20,9 @@ class GraphSynthesizer(object):
    def random_graph(self, num_nodes, num_edges, num_node_labels=0, num_edge_labels=0, seed=None, directed=False, max_num_edges=None, all_edges=None):
        g = nx.Graph()
        if num_node_labels > 0:
+           node_labels = np.random.randint(0, high=num_node_labels, size=num_nodes)
            for i in range(0, num_nodes):
-               node_labels = np.random.randint(0, high=num_node_labels, size=num_nodes)
-               g.add_node(str(i), node_label=node_labels[i])
+               g.add_node(str(i), atom=node_labels[i]) # @todo: update "atom".
        else:
            for i in range(0, num_nodes):
                g.add_node(str(i))
@@ -31,11 +31,11 @@ class GraphSynthesizer(object):
            edge_labels = np.random.randint(0, high=num_edge_labels, size=num_edges)
            for i in random.sample(range(0, max_num_edges), num_edges):
                node1, node2 = all_edges[i]
-               g.add_edge(node1, node2, edge_label=edge_labels[i])
+               g.add_edge(str(node1), str(node2), bond_type=edge_labels[i]) # @todo: update "bond_type".
        else:
            for i in random.sample(range(0, max_num_edges), num_edges):
                node1, node2 = all_edges[i]
-               g.add_edge(node1, node2)
+               g.add_edge(str(node1), str(node2))
        return g
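
A minimal usage sketch (assuming `unified_graphs` forwards the label counts to `random_graph`; the parameters are made up) showing where the renamed `atom` and `bond_type` keys end up:

```python
from gklearn.utils.graph_synthesizer import GraphSynthesizer

gsyzer = GraphSynthesizer()
graphs = gsyzer.unified_graphs(num_graphs=1, num_nodes=5, num_edges=6,
                               num_node_labels=3, num_edge_labels=2,
                               seed=None, directed=False)
# Node labels land under 'atom', edge labels under 'bond_type'.
print(list(graphs[0].nodes(data=True)))  # e.g. [('0', {'atom': 2}), ...]
print(list(graphs[0].edges(data=True)))  # e.g. [('0', '4', {'bond_type': 1}), ...]
```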


requirements.txt (+1 -1)

@@ -1,6 +1,6 @@
numpy>=1.16.2
scipy>=1.1.0
-matplotlib>=3.0.0
+matplotlib>=3.1.0
networkx>=2.2
scikit-learn>=0.20.0
tabulate>=0.8.2


requirements_pypi.txt (+1 -1)

@@ -1,6 +1,6 @@
numpy>=1.16.2
scipy>=1.1.0
-matplotlib>=3.0.0
+matplotlib>=3.1.0
networkx>=2.2
scikit-learn>=0.20.0
tabulate>=0.8.2

