@@ -92,11 +92,11 @@ def save_trials_as_group(dataset, ds_name, max_num_solutions, ratio):
     ged_mats.append(ged_mat)
     runtimes.append(runtime)
-    save_file_suffix = '.' + ds_name + '.mnum_sols_' + str(max_num_solutions) + '.ratio_' + "{:.2f}".format(ratio)
-    with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
-        np.save(f, np.array(ged_mats))
-    with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
-        pickle.dump(runtime, f)
+    # save_file_suffix = '.' + ds_name + '.mnum_sols_' + str(max_num_solutions) + '.ratio_' + "{:.2f}".format(ratio)
+    # with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
+    # np.save(f, np.array(ged_mats))
+    # with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
+    # pickle.dump(runtime, f)
 def results_for_a_dataset(ds_name):
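For reference, the block commented out above stacked one GED matrix per trial into a single `.npy` file and pickled a runtime alongside it. Below is a minimal reader-side sketch of loading such files back, assuming they were produced while that block was still active; the paths and suffix follow the lines above, while the concrete `ds_name`, `max_num_solutions`, and `ratio` values are hypothetical:

```python
import pickle

import numpy as np

save_dir = 'outputs/edit_costs.max_num_sols.ratios.bipartite/'
ds_name, max_num_solutions, ratio = 'MAO', 10, 1.0  # hypothetical setting
save_file_suffix = '.' + ds_name + '.mnum_sols_' + str(max_num_solutions) + '.ratio_' + "{:.2f}".format(ratio)

# Stacked GED matrices, one per trial.
ged_mats = np.load(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy')

# Per the writer above, the pickle holds the last trial's `runtime`,
# not the accumulated `runtimes` list.
with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'rb') as f:
    runtime = pickle.load(f)
```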
@@ -119,10 +119,8 @@ if __name__ == '__main__':
     ds_name_list = ['MAO', 'Monoterpenoides', 'MUTAG', 'AIDS_symb']
     save_dir = 'outputs/edit_costs.max_num_sols.ratios.bipartite/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
-    if not os.path.exists(save_dir + 'groups/'):
-        os.makedirs(save_dir + 'groups/')
+    os.makedirs(save_dir, exist_ok=True)
+    os.makedirs(save_dir + 'groups/', exist_ok=True)
     for ds_name in ds_name_list:
         print()
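The same substitution recurs throughout this changeset: the check-then-create pair is collapsed into a single `os.makedirs(..., exist_ok=True)` call, which has been available since Python 3.2, is idempotent, and avoids the race where another process creates the directory between the `os.path.exists` check and `os.makedirs`. A minimal before/after sketch, using the directory names from the hunk above:

```python
import os

save_dir = 'outputs/edit_costs.max_num_sols.ratios.bipartite/'

# Before: check-then-create. Racy, and makedirs() raises FileExistsError
# if the directory appears between the check and the call.
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
if not os.path.exists(save_dir + 'groups/'):
    os.makedirs(save_dir + 'groups/')

# After: one idempotent call per directory (Python >= 3.2); no error if it already exists.
os.makedirs(save_dir, exist_ok=True)
os.makedirs(save_dir + 'groups/', exist_ok=True)
```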
@@ -84,11 +84,11 @@ def save_trials_as_group(dataset, ds_name, num_solutions, ratio):
     ged_mats.append(ged_mat)
     runtimes.append(runtime)
-    save_file_suffix = '.' + ds_name + '.num_sols_' + str(num_solutions) + '.ratio_' + "{:.2f}".format(ratio)
-    with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
-        np.save(f, np.array(ged_mats))
-    with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
-        pickle.dump(runtime, f)
+    # save_file_suffix = '.' + ds_name + '.num_sols_' + str(num_solutions) + '.ratio_' + "{:.2f}".format(ratio)
+    # with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
+    # np.save(f, np.array(ged_mats))
+    # with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
+    # pickle.dump(runtime, f)
 def results_for_a_dataset(ds_name):

@@ -111,10 +111,8 @@ if __name__ == '__main__':
     ds_name_list = ['MAO', 'Monoterpenoides', 'MUTAG', 'AIDS_symb']
     save_dir = 'outputs/edit_costs.num_sols.ratios.IPFP/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
-    if not os.path.exists(save_dir + 'groups/'):
-        os.makedirs(save_dir + 'groups/')
+    os.makedirs(save_dir, exist_ok=True)
+    os.makedirs(save_dir + 'groups/', exist_ok=True)
     for ds_name in ds_name_list:
         print()
@@ -87,11 +87,11 @@ def save_trials_as_group(dataset, ds_name, repeats, ratio):
     ged_mats.append(ged_mat)
     runtimes.append(runtime)
-    save_file_suffix = '.' + ds_name + '.repeats_' + str(repeats) + '.ratio_' + "{:.2f}".format(ratio)
-    with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
-        np.save(f, np.array(ged_mats))
-    with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
-        pickle.dump(runtime, f)
+    # save_file_suffix = '.' + ds_name + '.repeats_' + str(repeats) + '.ratio_' + "{:.2f}".format(ratio)
+    # with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
+    # np.save(f, np.array(ged_mats))
+    # with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
+    # pickle.dump(runtime, f)
 def results_for_a_dataset(ds_name):

@@ -114,10 +114,8 @@ if __name__ == '__main__':
     ds_name_list = ['MAO', 'Monoterpenoides', 'MUTAG', 'AIDS_symb']
     save_dir = 'outputs/edit_costs.repeats.ratios.IPFP/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
-    if not os.path.exists(save_dir + 'groups/'):
-        os.makedirs(save_dir + 'groups/')
+    os.makedirs(save_dir, exist_ok=True)
+    os.makedirs(save_dir + 'groups/', exist_ok=True)
     for ds_name in ds_name_list:
         print()
@@ -92,11 +92,11 @@ def save_trials_as_group(dataset, ds_name, repeats, ratio):
     ged_mats.append(ged_mat)
     runtimes.append(runtime)
-    save_file_suffix = '.' + ds_name + '.repeats_' + str(repeats) + '.ratio_' + "{:.2f}".format(ratio)
-    with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
-        np.save(f, np.array(ged_mats))
-    with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
-        pickle.dump(runtime, f)
+    # save_file_suffix = '.' + ds_name + '.repeats_' + str(repeats) + '.ratio_' + "{:.2f}".format(ratio)
+    # with open(save_dir + 'groups/ged_mats' + save_file_suffix + '.npy', 'wb') as f:
+    # np.save(f, np.array(ged_mats))
+    # with open(save_dir + 'groups/runtimes' + save_file_suffix + '.pkl', 'wb') as f:
+    # pickle.dump(runtime, f)
 def results_for_a_dataset(ds_name):

@@ -119,10 +119,8 @@ if __name__ == '__main__':
     ds_name_list = ['MAO', 'Monoterpenoides', 'MUTAG', 'AIDS_symb']
     save_dir = 'outputs/edit_costs.repeats.ratios.bipartite/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
-    if not os.path.exists(save_dir + 'groups/'):
-        os.makedirs(save_dir + 'groups/')
+    os.makedirs(save_dir, exist_ok=True)
+    os.makedirs(save_dir + 'groups/', exist_ok=True)
     for ds_name in ds_name_list:
         print()
@@ -150,8 +150,7 @@ def xp_accuracy_diff_entropy():
     import pickle
     import os
     save_dir = 'outputs/accuracy_diff_entropy/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
+    os.makedirs(save_dir, exist_ok=True)
     accuracies = {}
     confidences = {}

@@ -16,8 +16,7 @@ def xp_runtimes_of_all_28cores():
     import pickle
     import os
     save_dir = 'outputs/runtimes_of_all_28cores/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
+    os.makedirs(save_dir, exist_ok=True)
     run_times = {}

@@ -16,8 +16,7 @@ def xp_runtimes_diff_chunksizes():
     import pickle
     import os
     save_dir = 'outputs/runtimes_diff_chunksizes/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
+    os.makedirs(save_dir, exist_ok=True)
     run_times = {}

@@ -25,8 +25,7 @@ def xp_synthesized_graphs_dataset_size():
     import pickle
     import os
     save_dir = 'outputs/synthesized_graphs_N/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
+    os.makedirs(save_dir, exist_ok=True)
     run_times = {}

@@ -22,8 +22,7 @@ def xp_synthesized_graphs_degrees():
     import pickle
     import os
     save_dir = 'outputs/synthesized_graphs_degrees/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
+    os.makedirs(save_dir, exist_ok=True)
     run_times = {}

@@ -22,8 +22,7 @@ def xp_synthesized_graphs_num_node_label_alphabet():
     import pickle
     import os
     save_dir = 'outputs/synthesized_graphs_num_node_label_alphabet/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
+    os.makedirs(save_dir, exist_ok=True)
     run_times = {}

@@ -22,8 +22,7 @@ def xp_synthesized_graphs_num_nodes():
     import pickle
     import os
     save_dir = 'outputs/synthesized_graphs_num_nodes/'
-    if not os.path.exists(save_dir):
-        os.makedirs(save_dir)
+    os.makedirs(save_dir, exist_ok=True)
     run_times = {}
@@ -154,6 +154,6 @@ def test_median_graph_estimator_symb():
     return set_median, gen_median

 if __name__ == '__main__':
     # set_median, gen_median = test_median_graph_estimator()
     set_median, gen_median = test_median_graph_estimator_symb()
@@ -126,8 +126,7 @@ def generate_random_preimages_by_class(ds_name, rpg_options, kernel_options, sav
     # save median graphs.
     if save_preimages:
-        if not os.path.exists(dir_save + 'preimages/'):
-            os.makedirs(dir_save + 'preimages/')
+        os.makedirs(dir_save + 'preimages/', exist_ok=True)
         print('Saving preimages to files...')
         fn_best_dataset = dir_save + 'preimages/g_best_dataset.' + 'nbg' + str(num_graphs) + '.y' + str(target) + '.repeat' + str(1)
         saveGXL(rpg.best_from_dataset, fn_best_dataset + '.gxl', method='default',
@@ -167,8 +166,7 @@ def generate_random_preimages_by_class(ds_name, rpg_options, kernel_options, sav
 def _init_output_file_preimage(ds_name, gkernel, dir_output):
-    if not os.path.exists(dir_output):
-        os.makedirs(dir_output)
+    os.makedirs(dir_output, exist_ok=True)
     fn_output_detail = 'results_detail.' + ds_name + '.' + gkernel + '.csv'
     f_detail = open(dir_output + fn_output_detail, 'a')
     csv.writer(f_detail).writerow(['dataset', 'graph kernel', 'num graphs',
@@ -218,8 +218,7 @@ def remove_best_graph(ds_name, mpg_options, kernel_options, ged_options, mge_opt
     # save median graphs.
     if save_medians:
-        if not os.path.exists(dir_save + 'medians/'):
-            os.makedirs(dir_save + 'medians/')
+        os.makedirs(dir_save + 'medians/', exist_ok=True)
         print('Saving median graphs to files...')
         fn_pre_sm = dir_save + 'medians/set_median.' + mpg_options['fit_method'] + '.nbg' + str(num_graphs) + '.y' + str(target) + '.repeat' + str(1)
         saveGXL(mpg.set_median, fn_pre_sm + '.gxl', method='default',

@@ -375,8 +374,7 @@ def _compute_gram_matrix_unnorm(dataset, kernel_options):
 def _init_output_file(ds_name, gkernel, fit_method, dir_output):
-    if not os.path.exists(dir_output):
-        os.makedirs(dir_output)
+    os.makedirs(dir_output, exist_ok=True)
     fn_output_detail = 'results_detail.' + ds_name + '.' + gkernel + '.csv'
     f_detail = open(dir_output + fn_output_detail, 'a')
     csv.writer(f_detail).writerow(['dataset', 'graph kernel', 'edit cost',
@@ -230,8 +230,7 @@ def generate_median_preimages_by_class(ds_name, mpg_options, kernel_options, ged
     # save median graphs.
     if save_medians:
-        if not os.path.exists(dir_save + 'medians/'):
-            os.makedirs(dir_save + 'medians/')
+        os.makedirs(dir_save + 'medians/', exist_ok=True)
         print('Saving median graphs to files...')
         fn_pre_sm = dir_save + 'medians/set_median.' + mpg_options['fit_method'] + '.nbg' + str(num_graphs) + '.y' + str(target) + '.repeat' + str(1)
         saveGXL(mpg.set_median, fn_pre_sm + '.gxl', method='default',

@@ -308,8 +307,7 @@ def generate_median_preimages_by_class(ds_name, mpg_options, kernel_options, ged
 def _init_output_file_preimage(ds_name, gkernel, fit_method, dir_output):
-    if not os.path.exists(dir_output):
-        os.makedirs(dir_output)
+    os.makedirs(dir_output, exist_ok=True)
     # fn_output_detail = 'results_detail.' + ds_name + '.' + gkernel + '.' + fit_method + '.csv'
     fn_output_detail = 'results_detail.' + ds_name + '.' + gkernel + '.csv'
     f_detail = open(dir_output + fn_output_detail, 'a')
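The `_init_output_file*` helpers touched above all follow the same pattern: ensure the output directory exists, then open a per-dataset CSV in append mode and write a header row. Below is a small, self-contained sketch of that pattern under assumed names (the function name, arguments, and column list are illustrative, not the repository's API); note that the `csv` module documentation recommends opening the file with `newline=''`:

```python
import csv
import os


def init_output_csv(ds_name, gkernel, dir_output):  # illustrative helper, not the repo's
    os.makedirs(dir_output, exist_ok=True)
    fn_output_detail = 'results_detail.' + ds_name + '.' + gkernel + '.csv'
    # newline='' prevents the csv writer from emitting extra blank lines on Windows.
    with open(os.path.join(dir_output, fn_output_detail), 'a', newline='') as f_detail:
        csv.writer(f_detail).writerow(['dataset', 'graph kernel', 'num graphs'])
    return fn_output_detail


print(init_output_csv('MAO', 'treeletkernel', 'outputs/demo/'))  # hypothetical arguments
```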
@@ -70,13 +70,11 @@ def save_dataset(Gn, y, gformat='gxl', group=None, filename='gfile', **kwargs):
     dirname_ds = os.path.dirname(filename)
     if dirname_ds != '':
         dirname_ds += '/'
-        if not os.path.exists(dirname_ds) :
-            os.makedirs(dirname_ds)
+        os.makedirs(dirname_ds, exist_ok=True)
     if 'graph_dir' in kwargs:
         graph_dir = kwargs['graph_dir'] + '/'
-        if not os.path.exists(graph_dir):
-            os.makedirs(graph_dir)
+        os.makedirs(graph_dir, exist_ok=True)
         del kwargs['graph_dir']
     else:
         graph_dir = dirname_ds
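The `dirname_ds != ''` guard above matters because `os.path.dirname` returns an empty string for a bare file name, and `os.makedirs('')` raises `FileNotFoundError`. A quick illustration, as a sketch separate from the change itself (the example paths are made up):

```python
import os

print(os.path.dirname('gfile'))             # '' -> the guard skips makedirs entirely
print(os.path.dirname('outputs/ds/gfile'))  # 'outputs/ds'

dirname_ds = os.path.dirname('outputs/ds/gfile')
if dirname_ds != '':
    dirname_ds += '/'
    os.makedirs(dirname_ds, exist_ok=True)  # creates outputs/ds/ if it is missing
```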
@@ -671,13 +671,11 @@ def saveDataset(Gn, y, gformat='gxl', group=None, filename='gfile', xparams=None
     dirname_ds = os.path.dirname(filename)
     if dirname_ds != '':
         dirname_ds += '/'
-        if not os.path.exists(dirname_ds) :
-            os.makedirs(dirname_ds)
+        os.makedirs(dirname_ds, exist_ok=True)
     if xparams is not None and 'graph_dir' in xparams:
         graph_dir = xparams['graph_dir'] + '/'
-        if not os.path.exists(graph_dir):
-            os.makedirs(graph_dir)
+        os.makedirs(graph_dir, exist_ok=True)
     else:
         graph_dir = dirname_ds
@@ -91,8 +91,7 @@ def model_selection_for_precomputed_kernel(datafile,
     tqdm.monitor_interval = 0
     output_dir += estimator.__name__
-    if not os.path.exists(output_dir):
-        os.makedirs(output_dir)
+    os.makedirs(output_dir, exist_ok=True)
     # a string to save all the results.
     str_fw = '###################### log time: ' + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + '. ######################\n\n'
     str_fw += '# This file contains results of ' + estimator.__name__ + ' on dataset ' + ds_name + ',\n# including gram matrices, serial numbers for gram matrix figures and performance.\n\n'

@@ -604,8 +603,7 @@ def model_selection_for_precomputed_kernel(datafile,
     str_fw += 'training time with hyper-param choices who did not participate in calculation of gram matrices: {:.2f}s\n\n'.format(tt_poster)
     # open file to save all results for this dataset.
-    if not os.path.exists(output_dir):
-        os.makedirs(output_dir)
+    os.makedirs(output_dir, exist_ok=True)
     # print out results as table.
     str_fw += printResultsInTable(param_list, param_list_pre_revised, average_val_scores,
@@ -458,8 +458,7 @@ def compute_gram_matrices_by_class(ds_name, kernel_options, save_results=True, d
     print()
     print('4. saving results...')
     if save_results:
-        if not os.path.exists(dir_save):
-            os.makedirs(dir_save)
+        os.makedirs(dir_save, exist_ok=True)
         np.savez(dir_save + 'gram_matrix_unnorm.' + ds_name + '.' + kernel_options['name'] + '.gm', gram_matrix_unnorm_list=gram_matrix_unnorm_list, run_time_list=run_time_list)
     print('\ncomplete.')
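One detail worth remembering when reading these results back: `np.savez` appends `.npz` to the given name when it does not already end with it, so the call above writes a file ending in `.gm.npz`. A hedged load-side sketch with hypothetical `dir_save`, dataset, and kernel names; `allow_pickle=True` may be required if the per-class Gram matrices have different shapes, since the saved list is then stored as an object array:

```python
import numpy as np

dir_save = 'outputs/gram_matrices_by_class/'   # hypothetical
ds_name, kernel_name = 'MAO', 'treeletkernel'  # hypothetical

data = np.load(dir_save + 'gram_matrix_unnorm.' + ds_name + '.' + kernel_name + '.gm.npz',
               allow_pickle=True)
gram_matrix_unnorm_list = data['gram_matrix_unnorm_list']
run_time_list = data['run_time_list']
```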