@@ -12,23 +12,19 @@ import networkx as nx
from tqdm import tqdm
import sys
#from Cython_GedLib_2 import librariesImport, script
import librariesImport, script
from gedlibpy import librariesImport, gedlibpy
sys.path.insert(0, "../")
from pygraph.utils.graphfiles import saveDataset
from pygraph.utils.graphdataset import get_dataset_attributes
from pygraph.utils.utils import graph_isIdentical, get_node_labels, get_edge_labels
#from pygraph.utils.utils import graph_deepcopy

def iam_moreGraphsAsInit_tryAllPossibleBestGraphs(Gn_median, Gn_candidate,
        c_ei=3, c_er=3, c_es=1, ite_max=50, epsilon=0.001,
        node_label='atom', edge_label='bond_type',
def iam_upgraded(Gn_median, Gn_candidate, c_ei=3, c_er=3, c_es=1, ite_max=50,
        epsilon=0.001, node_label='atom', edge_label='bond_type',
        connected=False, removeNodes=True, allBestInit=False, allBestNodes=False,
        allBestEdges=False,
        allBestEdges=False, allBestOutput=False,
        params_ged={'ged_cost': 'CHEM_1', 'ged_method': 'IPFP', 'saveGXL': 'benoit'}):
    """See my name, then you know what I do.
    """
    from tqdm import tqdm
    # Gn_median = Gn_median[0:10]
    # Gn_median = [nx.convert_node_labels_to_integers(g) for g in Gn_median]
    if removeNodes:
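For orientation, a minimal usage sketch of the renamed function follows. It is illustrative only (not part of the diff) and assumes Gn_median and Gn_candidate are lists of networkx graphs carrying 'atom' node labels and 'bond_type' edge labels, as the defaults above expect.

# Hypothetical call, mirroring the default parameters of the new signature.
ged_params = {'ged_cost': 'CHEM_1', 'ged_method': 'IPFP', 'saveGXL': 'benoit'}
G_min_list, dis_min = iam_upgraded(Gn_median, Gn_candidate,
                                   c_ei=3, c_er=3, c_es=1, ite_max=50, epsilon=0.001,
                                   removeNodes=True, allBestOutput=False,
                                   params_ged=ged_params)
# With allBestOutput=False (the default), one best median graph is drawn at
# random from the minima; with allBestOutput=True, all of them are returned.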
@@ -150,16 +146,6 @@ def iam_moreGraphsAsInit_tryAllPossibleBestGraphs(Gn_median, Gn_candidate,
h_ij0 += h_ij0_p
h_ij0_list.append(h_ij0)
label_list.append(label)
# # case when the edge is to be removed.
# h_ij0_remove = 0
# for idx, g in enumerate(Gn_median):
# pi_i = pi_p_forward[idx][nd1i]
# pi_j = pi_p_forward[idx][nd2i]
# if g.has_node(pi_i) and g.has_node(pi_j) and not
# g.has_edge(pi_i, pi_j):
# h_ij0_remove += 1
# h_ij0_list.append(h_ij0_remove)
# label_list.append(label_r)
# get the best labels.
idx_max = np.argwhere(h_ij0_list == np.max(h_ij0_list)).flatten().tolist()
@@ -370,7 +356,9 @@ def iam_moreGraphsAsInit_tryAllPossibleBestGraphs(Gn_median, Gn_candidate,
idx_list.append(idx)
return Gn_new, idx_list

###########################################################################

# phase 1: initialize.
# compute set-median.
dis_min = np.inf
@@ -421,8 +409,6 @@ def iam_moreGraphsAsInit_tryAllPossibleBestGraphs(Gn_median, Gn_candidate,
# print(g.edges(data=True))
# get the best median graphs
# dis_list, pi_forward_list = median_distance(G_list, Gn_median,
# **params_ged)
G_min_list, pi_forward_min_list, dis_min = best_median_graphs(
G_list, pi_forward_list, dis_list)
# for g in G_min_list:
@@ -430,9 +416,11 @@ def iam_moreGraphsAsInit_tryAllPossibleBestGraphs(Gn_median, Gn_candidate,
# plt.show()
# print(g.nodes(data=True))
# print(g.edges(data=True))
# randomly choose one graph.
idx_rdm = random.randint(0, len(G_min_list) - 1)
G_min_list = [G_min_list[idx_rdm]]
if not allBestOutput:
    # randomly choose one graph.
    idx_rdm = random.randint(0, len(G_min_list) - 1)
    G_min_list = [G_min_list[idx_rdm]]
return G_min_list, dis_min
@@ -445,13 +433,91 @@ def iam_moreGraphsAsInit_tryAllPossibleBestGraphs(Gn_median, Gn_candidate,

###############################################################################
# Useful functions.

def GED(g1, g2, lib='gedlibpy', cost='CHEM_1', method='IPFP', saveGXL='benoit',
        stabilizer='min'):
    """
    Compute GED.
    """
    if lib == 'gedlibpy':
        def convertGraph(G):
            """Convert a graph to the proper NetworkX format that can be
            recognized by library gedlibpy.
            """
            G_new = nx.Graph()
            for nd, attrs in G.nodes(data=True):
                G_new.add_node(str(nd), chem=attrs['atom'])
            for nd1, nd2, attrs in G.edges(data=True):
                # G_new.add_edge(str(nd1), str(nd2), valence=attrs['bond_type'])
                G_new.add_edge(str(nd1), str(nd2))
            return G_new

        gedlibpy.restart_env()
        gedlibpy.add_nx_graph(convertGraph(g1), "")
        gedlibpy.add_nx_graph(convertGraph(g2), "")
        listID = gedlibpy.get_all_graph_ids()
        gedlibpy.set_edit_cost(cost)
        gedlibpy.init()
        gedlibpy.set_method(method, "")
        gedlibpy.init_method()

        g = listID[0]
        h = listID[1]
        if stabilizer is None:
            gedlibpy.run_method(g, h)
            pi_forward = gedlibpy.get_forward_map(g, h)
            pi_backward = gedlibpy.get_backward_map(g, h)
            upper = gedlibpy.get_upper_bound(g, h)
            lower = gedlibpy.get_lower_bound(g, h)
        elif stabilizer == 'min':
            upper = np.inf
            for itr in range(50):
                gedlibpy.run_method(g, h)
                upper_tmp = gedlibpy.get_upper_bound(g, h)
                if upper_tmp < upper:
                    upper = upper_tmp
                    pi_forward = gedlibpy.get_forward_map(g, h)
                    pi_backward = gedlibpy.get_backward_map(g, h)
                    lower = gedlibpy.get_lower_bound(g, h)
                if upper == 0:
                    break
        dis = upper

    # make the maps label-correct (map removed nodes to np.inf)
    nodes1 = [n for n in g1.nodes()]
    nodes2 = [n for n in g2.nodes()]
    nb1 = nx.number_of_nodes(g1)
    nb2 = nx.number_of_nodes(g2)
    pi_forward = [nodes2[pi] if pi < nb2 else np.inf for pi in pi_forward]
    pi_backward = [nodes1[pi] if pi < nb1 else np.inf for pi in pi_backward]

    return dis, pi_forward, pi_backward
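A usage sketch for the gedlibpy-backed GED above; the two toy graphs are assumptions for illustration (any graphs with an 'atom' node attribute work, since convertGraph reads attrs['atom']). With stabilizer='min', the chosen method is re-run up to 50 times and the smallest upper bound, together with its node maps, is kept.

# Illustrative only; assumes gedlibpy is importable as at the top of this file.
g1 = nx.Graph()
g1.add_nodes_from([(0, {'atom': 'C'}), (1, {'atom': 'O'})])
g1.add_edge(0, 1, bond_type=1)
g2 = nx.Graph()
g2.add_node(0, atom='C')
dis, pi_forward, pi_backward = GED(g1, g2, lib='gedlibpy', cost='CHEM_1',
                                   method='IPFP', stabilizer='min')
# dis is the best upper bound found; pi_forward[i] is the node of g2 matched
# to the i-th node of g1, or np.inf if that node is removed by the edit path.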
def median_distance(Gn, Gn_median, measure='ged', verbose=False,
                    ged_cost='CHEM_1', ged_method='IPFP', saveGXL='benoit'):
    dis_list = []
    pi_forward_list = []
    for idx, G in tqdm(enumerate(Gn), desc='computing median distances',
                       file=sys.stdout) if verbose else enumerate(Gn):
        dis_sum = 0
        pi_forward_list.append([])
        for G_p in Gn_median:
            dis_tmp, pi_tmp_forward, pi_tmp_backward = GED(G, G_p,
                cost=ged_cost, method=ged_method, saveGXL=saveGXL)
            pi_forward_list[idx].append(pi_tmp_forward)
            dis_sum += dis_tmp
        dis_list.append(dis_sum)
    return dis_list, pi_forward_list
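median_distance sums, for each graph in Gn, its GED to every graph in Gn_median, so dis_list[i] is the sum of distances (SOD) of candidate i and pi_forward_list[i] holds the corresponding forward node maps. A minimal sketch, assuming Gn and Gn_median are already-loaded lists of labelled graphs:

# Illustrative only.
dis_list, pi_forward_list = median_distance(Gn, Gn_median, verbose=True,
                                            ged_cost='CHEM_1', ged_method='IPFP')
idx_best = int(np.argmin(dis_list))   # candidate with the smallest sum of distances
print('best candidate:', idx_best, 'SOD:', dis_list[idx_best])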

###############################################################################
# Old implementations.

def iam(Gn, c_ei=3, c_er=3, c_es=1, node_label='atom', edge_label='bond_type',
        connected=True):
@@ -579,73 +645,6 @@ def iam(Gn, c_ei=3, c_er=3, c_es=1, node_label='atom', edge_label='bond_type',
    return G
def GED(g1, g2, lib='gedlib', cost='CHEM_1', method='IPFP', saveGXL='benoit',
        stabilizer='min'):
    """
    Compute GED.
    """
    if lib == 'gedlib':
        # transform dataset to the 'xml' file as the GedLib required.
        saveDataset([g1, g2], [None, None], group='xml', filename='ged_tmp/tmp',
                    xparams={'method': saveGXL})
        # script.appel()
        script.PyRestartEnv()
        script.PyLoadGXLGraph('ged_tmp/', 'ged_tmp/tmp.xml')
        listID = script.PyGetGraphIds()
        script.PySetEditCost(cost) #("CHEM_1")
        script.PyInitEnv()
        script.PySetMethod(method, "")
        script.PyInitMethod()

        g = listID[0]
        h = listID[1]
        if stabilizer == None:
            script.PyRunMethod(g, h)
            pi_forward, pi_backward = script.PyGetAllMap(g, h)
            upper = script.PyGetUpperBound(g, h)
            lower = script.PyGetLowerBound(g, h)
        elif stabilizer == 'min':
            upper = np.inf
            for itr in range(50):
                script.PyRunMethod(g, h)
                upper_tmp = script.PyGetUpperBound(g, h)
                if upper_tmp < upper:
                    upper = upper_tmp
                    pi_forward, pi_backward = script.PyGetAllMap(g, h)
                    lower = script.PyGetLowerBound(g, h)
                if upper == 0:
                    break
        dis = upper

    # make the map label correct (label remove map as np.inf)
    nodes1 = [n for n in g1.nodes()]
    nodes2 = [n for n in g2.nodes()]
    nb1 = nx.number_of_nodes(g1)
    nb2 = nx.number_of_nodes(g2)
    pi_forward = [nodes2[pi] if pi < nb2 else np.inf for pi in pi_forward]
    pi_backward = [nodes1[pi] if pi < nb1 else np.inf for pi in pi_backward]

    return dis, pi_forward, pi_backward


def median_distance(Gn, Gn_median, measure='ged', verbose=False,
                    ged_cost='CHEM_1', ged_method='IPFP', saveGXL='benoit'):
    dis_list = []
    pi_forward_list = []
    for idx, G in tqdm(enumerate(Gn), desc='computing median distances',
                       file=sys.stdout) if verbose else enumerate(Gn):
        dis_sum = 0
        pi_forward_list.append([])
        for G_p in Gn_median:
            dis_tmp, pi_tmp_forward, pi_tmp_backward = GED(G, G_p,
                cost=ged_cost, method=ged_method, saveGXL=saveGXL)
            pi_forward_list[idx].append(pi_tmp_forward)
            dis_sum += dis_tmp
        dis_list.append(dis_sum)
    return dis_list, pi_forward_list
# --------------------------- These are tests --------------------------------#

def test_iam_with_more_graphs_as_init(Gn, G_candidate, c_ei=3, c_er=3, c_es=1,
@@ -785,9 +784,6 @@ def test_iam_with_more_graphs_as_init(Gn, G_candidate, c_ei=3, c_er=3, c_es=1,

###############################################################################
if __name__ == '__main__':
    from pygraph.utils.graphfiles import loadDataset
    ds = {'name': 'MUTAG', 'dataset': '../datasets/MUTAG/MUTAG.mat',

@@ -1,5 +0,0 @@
from ctypes import *
lib1 = cdll.LoadLibrary('lib/fann/libdoublefann.so')
lib2 = cdll.LoadLibrary('lib/libsvm.3.22/libsvm.so')
lib3 = cdll.LoadLibrary('lib/nomad/libnomad.so')
lib4 = cdll.LoadLibrary('lib/nomad/libsgtelib.so')

@@ -1,5 +0,0 @@
from ctypes import *
lib1 = cdll.LoadLibrary('Cython_GedLib_2/lib/fann/libdoublefann.so')
lib2 = cdll.LoadLibrary('Cython_GedLib_2/lib/libsvm.3.22/libsvm.so')
lib3 = cdll.LoadLibrary('Cython_GedLib_2/lib/nomad/libnomad.so')
lib4 = cdll.LoadLibrary('Cython_GedLib_2/lib/nomad/libsgtelib.so')

@@ -57,7 +57,7 @@ def compute_kernel(Gn, graph_kernel, verbose):
    return Kmatrix

def random_preimage(Gn_init, Gn_median, alpha, idx_gi, Kmatrix, k, r_max, l, gkernel):
def preimage_random(Gn_init, Gn_median, alpha, idx_gi, Kmatrix, k, r_max, l, gkernel):
    Gn_init = [nx.convert_node_labels_to_integers(g) for g in Gn_init]
    # compute k nearest neighbors of phi in DN.
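The k nearest neighbors are taken with respect to the distance, in the kernel feature space, between each dataset graph and the weighted combination phi of the median graphs. That helper is not shown in this diff; the sketch below is only a plausible reconstruction of the standard kernel-trick expansion, and the name dis_gstar and its signature are assumptions, not code from the repository.

# Assumed helper (illustrative): distance between the embedded graph with
# kernel-matrix index idx_g and phi = sum_i alpha[i] * phi(g_{idx_gi[i]}).
def dis_gstar(idx_g, idx_gi, alpha, Kmatrix):
    term1 = Kmatrix[idx_g, idx_g]
    term2 = 2 * sum(alpha[i] * Kmatrix[idx_g, idx_gi[i]] for i in range(len(idx_gi)))
    term3 = sum(alpha[i] * alpha[j] * Kmatrix[idx_gi[i], idx_gi[j]]
                for i in range(len(idx_gi)) for j in range(len(idx_gi)))
    return np.sqrt(term1 - term2 + term3)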

@@ -402,7 +402,7 @@ def test_iam_letter_h():
def test_random_preimage_letter_h():
    from preimage import random_preimage, compute_kernel
    from preimage_random import preimage_random, compute_kernel
    ds = {'name': 'Letter-high', 'dataset': '../datasets/Letter-high/Letter-high_A.txt',
          'extra_params': {}} # node nsymb
    # ds = {'name': 'Letter-med', 'dataset': '../datasets/Letter-med/Letter-med_A.txt',
@@ -443,7 +443,7 @@ def test_random_preimage_letter_h():
# for each alpha
for alpha in alpha_range:
print('alpha =', alpha)
dhat, ghat_list = random_preimage(Gn_let, Gn_let, [alpha] * len(Gn_let),
dhat, ghat_list = preimage_random(Gn_let, Gn_let, [alpha] * len(Gn_let),
range(len(Gn_let), len(Gn_mix)), km,
k, r_max, gkernel, c_ei=1.7,
c_er=1.7, c_es=1.7)

@@ -1,26 +0,0 @@
#from distutils.core import setup
from distutils.extension import Extension
#from Cython.Distutils import build_ext
from distutils.core import setup
from Cython.Build import cythonize

#setup(ext_modules=cythonize("script.pyx"))

extensions = [Extension("script",
                        sources=["script.pyx", "src/essai.cpp"],
                        include_dirs=["include", "include/lsape", "include/Eigen", "include/nomad", "include/sgtelib", "include/libsvm.3.22", "include/fann", "include/boost_1_69_0"],
                        library_dirs=["lib/fann", "lib/gedlib", "lib/libsvm.3.22", "lib/nomad"],
                        libraries=["doublefann", "sgtelib", "svm", "nomad"],
                        language="c++",
                        extra_compile_args=["-std=c++11"],
                        extra_link_args=["-std=c++11"])]

setup(ext_modules=cythonize(extensions))

#extensions = [Extension("script", sources=["script.pyx", "include/gedlib-master/src/env/ged_env.ipp"], include_dirs=["."], language="c++")]
#setup(name = "script", ext_modules = extensions, cmdclass = {'build_ext':build_ext},)

# Bash command: python setup.py build_ext --inplace

@@ -1,57 +1,84 @@
#export LD_LIBRARY_PATH=.:/export/home/lambertn/Documents/Cython_GedLib_2/lib/fann/:/export/home/lambertn/Documents/Cython_GedLib_2/lib/libsvm.3.22:/export/home/lambertn/Documents/Cython_GedLib_2/lib/nomad
#export LD_LIBRARY_PATH=.:/export/home/lambertn/Documents/gedlibpy/lib/fann/:/export/home/lambertn/Documents/gedlibpy/lib/libsvm.3.22:/export/home/lambertn/Documents/gedlibpy/lib/nomad

#So that "import script" finds the libraries that GedLib needs
#Equivalent to setting the LD_LIBRARY_PATH environment variable in a bash shell
#Allows it to work from IDLE and elsewhere without setting the environment variable every time
#os.environ does not work in this case

import librariesImport, script
import gedlibpy.librariesImport
from gedlibpy import gedlibpy
import networkx as nx

#import script
#truc = script.computeEditDistanceOnGXlGraphs('include/gedlib-master/data/datasets/Mutagenicity/data/','collections/MUTA_10.xml',"CHEM_1", "BIPARTITE", "")
#print(truc)
#script.PyRestartEnv()
#script.appel()
def test() :
# script.appel()
script.PyRestartEnv()
print("Here is the Python function !")

def init() :
print("List of Edit Cost Options : ")
for i in script.listOfEditCostOptions :
for i in gedlibpy.list_of_edit_cost_options :
print (i)
print("")
print("List of Method Options : ")
for j in script.listOfMethodOptions :
for j in gedlibpy.list_of_method_options :
print (j)
print("")
print("List of Init Options : ")
for k in gedlibpy.list_of_init_options :
print (k)
print("")

script.PyLoadGXLGraph('include/gedlib-master/data/datasets/Mutagenicity/data/', 'collections/MUTA_10.xml')
listID = script.PyGetGraphIds()

def test():
afficheId = ""
for i in listID :
afficheId += str(i) + " "
print("Number of graphs = " + str(len(listID)) + ", list of Ids = " + afficheId)
gedlibpy.load_GXL_graphs('include/gedlib-master/data/datasets/Mutagenicity/data/', 'collections/MUTA_10.xml')
listID = gedlibpy.get_all_graph_ids()
gedlibpy.set_edit_cost("CHEM_1")
gedlibpy.init()
gedlibpy.set_method("IPFP", "")
gedlibpy.init_method()
g = listID[0]
h = listID[1]
gedlibpy.run_method(g, h)
print("Node Map : ", gedlibpy.get_node_map(g, h))
print("Forward map : ", gedlibpy.get_forward_map(g, h), ", Backward map : ", gedlibpy.get_backward_map(g, h))
print("Assignment Matrix : ")
print(gedlibpy.get_assignment_matrix(g, h))
print("Upper Bound = " + str(gedlibpy.get_upper_bound(g, h)) + ", Lower Bound = " + str(gedlibpy.get_lower_bound(g, h)) + ", Runtime = " + str(gedlibpy.get_runtime(g, h)))

script.PySetEditCost("CHEM_1")
script.PyInitEnv()

def convertGraph(G):
G_new = nx.Graph()
for nd, attrs in G.nodes(data=True):
G_new.add_node(str(nd), chem=attrs['atom'])
for nd1, nd2, attrs in G.edges(data=True):
G_new.add_edge(str(nd1), str(nd2), valence=attrs['bond_type'])
return G_new

script.PySetMethod("BIPARTITE", "")
script.PyInitMethod()

def testNxGrapĥ():
import sys
sys.path.insert(0, "../")
from pygraph.utils.graphfiles import loadDataset
ds = {'name': 'MUTAG', 'dataset': '../datasets/MUTAG/MUTAG_A.txt',
'extra_params': {}} # node/edge symb
Gn, y_all = loadDataset(ds['dataset'], extra_params=ds['extra_params'])
gedlibpy.restart_env()
for graph in Gn:
g_new = convertGraph(graph)
gedlibpy.add_nx_graph(g_new, "")
listID = gedlibpy.get_all_graph_ids()
gedlibpy.set_edit_cost("CHEM_1")
gedlibpy.init()
gedlibpy.set_method("IPFP", "")
gedlibpy.init_method()
print(listID)
g = listID[0]
h = listID[1]
script.PyRunMethod(g, h)
liste = script.PyGetAllMap(g, h)
print("Forward map : ", liste[0], ", Backward map : ", liste[1])
print("Upper Bound = " + str(script.PyGetUpperBound(g, h)) + ", Lower Bound = " + str(script.PyGetLowerBound(g, h)) + ", Runtime = " + str(script.PyGetRuntime(g, h)))
gedlibpy.run_method(g, h)
print("Node Map : ", gedlibpy.get_node_map(g, h))
print("Forward map : ", gedlibpy.get_forward_map(g, h), ", Backward map : ", gedlibpy.get_backward_map(g, h))
print("Upper Bound = " + str(gedlibpy.get_upper_bound(g, h)) + ", Lower Bound = " + str(gedlibpy.get_lower_bound(g, h)) + ", Runtime = " + str(gedlibpy.get_runtime(g, h)))

test()
init()
#testNxGrapĥ()
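The updated script above exercises the typical gedlibpy workflow; condensed, and with g and h standing for two graph ids already added to the environment, the sequence it follows is roughly:

# Condensed sketch of the call sequence used above (illustrative only).
gedlibpy.restart_env()                      # start from an empty environment
# ... add graphs with add_nx_graph() or load_GXL_graphs() ...
gedlibpy.set_edit_cost("CHEM_1")            # choose the edit cost model
gedlibpy.init()                             # initialise the environment
gedlibpy.set_method("IPFP", "")             # choose the GED method and its options
gedlibpy.init_method()
gedlibpy.run_method(g, h)                   # g, h: ids from get_all_graph_ids()
print(gedlibpy.get_upper_bound(g, h), gedlibpy.get_lower_bound(g, h))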

@@ -22,7 +22,7 @@ from pygraph.utils.graphfiles import loadDataset
# random pre-image paper.)
def test_preimage_mix_2combination_all_pairs():
    from gk_iam import preimage_iam_random_mix, compute_kernel
    from preimage_iam import preimage_iam_random_mix, compute_kernel
    from iam import median_distance
    ds = {'name': 'MUTAG', 'dataset': '../datasets/MUTAG/MUTAG_A.txt',
          'extra_params': {}} # node/edge symb
@@ -37,6 +37,8 @@ def test_preimage_mix_2combination_all_pairs():
    alpha_range = np.linspace(0.5, 0.5, 1)
    k = 5 # k nearest neighbors
    epsilon = 1e-6
    InitIAMWithAllDk = True
    InitRandomWithAllDk = True
    # parameters for GED function
    ged_cost='CHEM_1'
    ged_method='IPFP'
@@ -127,7 +129,8 @@ def test_preimage_mix_2combination_all_pairs():
nb_updated_k_iam, nb_updated_k_random = \
preimage_iam_random_mix(Gn, [g1, g2],
[alpha, 1 - alpha], range(len(Gn), len(Gn) + 2), km, k, r_max,
l_max, gkernel, epsilon=epsilon,
l_max, gkernel, epsilon=epsilon, InitIAMWithAllDk=InitIAMWithAllDk,
InitRandomWithAllDk=InitRandomWithAllDk,
params_iam={'c_ei': c_ei, 'c_er': c_er, 'c_es': c_es,
'ite_max': ite_max_iam, 'epsilon': epsilon_iam,
'removeNodes': removeNodes, 'connected': connected_iam},
@@ -194,7 +197,7 @@ def test_preimage_mix_2combination_all_pairs():
def test_gkiam_2combination_all_pairs():
    from gk_iam import gk_iam_nearest_multi, compute_kernel
    from preimage_iam import preimage_iam, compute_kernel
    from iam import median_distance
    ds = {'name': 'MUTAG', 'dataset': '../datasets/MUTAG/MUTAG_A.txt',
          'extra_params': {}} # node/edge symb
@@ -206,8 +209,9 @@ def test_gkiam_2combination_all_pairs():
    lmbda = 0.03 # termination probability
    r_max = 10 # iteration limit for pre-image.
    alpha_range = np.linspace(0.5, 0.5, 1)
    k = 10 # k nearest neighbors
    k = 5 # k nearest neighbors
    epsilon = 1e-6
    InitIAMWithAllDk = False
    # parameters for GED function
    ged_cost='CHEM_1'
    ged_method='IPFP'
@@ -292,9 +296,9 @@ def test_gkiam_2combination_all_pairs():
print('alpha =', alpha)
time0 = time.time()
dhat, ghat_list, sod_ks, nb_updated, nb_updated_k = \
gk_iam_nearest_multi(Gn, [g1, g2],
preimage_iam(Gn, [g1, g2],
[alpha, 1 - alpha], range(len(Gn), len(Gn) + 2), km, k, r_max,
gkernel, epsilon=epsilon,
gkernel, epsilon=epsilon, InitIAMWithAllDk=InitIAMWithAllDk,
params_iam={'c_ei': c_ei, 'c_er': c_er, 'c_es': c_es,
'ite_max': ite_max_iam, 'epsilon': epsilon_iam,
'removeNodes': removeNodes, 'connected': connected_iam},
@@ -463,7 +467,7 @@ def test_gkiam_2combination():
def test_random_preimage_2combination():
    # from gk_iam import compute_kernel
    from preimage import random_preimage
    from preimage_random import preimage_random
    ds = {'name': 'MUTAG', 'dataset': '../datasets/MUTAG/MUTAG_A.txt',
          'extra_params': {}} # node/edge symb
    Gn, y_all = loadDataset(ds['dataset'], extra_params=ds['extra_params'])
@@ -535,7 +539,7 @@ def test_random_preimage_2combination():
print('\n-------------------------------------------------------\n')
print('alpha =', alpha)
time0 = time.time()
dhat, ghat, nb_updated = random_preimage(Gn, [g1, g2], [alpha, 1 - alpha],
dhat, ghat, nb_updated = preimage_random(Gn, [g1, g2], [alpha, 1 - alpha],
range(len(Gn), len(Gn) + 2), km,
k, r_max, l, gkernel)
time_total = time.time() - time0 + time_km
@@ -610,5 +614,5 @@ if __name__ == '__main__':
    # random pre-image paper.)
    # test_random_preimage_2combination()
    # test_gkiam_2combination()
    test_gkiam_2combination_all_pairs()
    # test_gkiam_2combination_all_pairs()
    test_preimage_mix_2combination_all_pairs()