From 7118bcf4885ac053ce8874622832c6ef2265c277 Mon Sep 17 00:00:00 2001
From: linlin
Date: Mon, 19 Oct 2020 15:29:22 +0200
Subject: [PATCH] New translations random_preimage_generator.py (French)

---
 .../gklearn/preimage/random_preimage_generator.py | 218 ++++++++++-----------
 1 file changed, 109 insertions(+), 109 deletions(-)

diff --git a/lang/fr/gklearn/preimage/random_preimage_generator.py b/lang/fr/gklearn/preimage/random_preimage_generator.py
index cb28519..c2210f5 100644
--- a/lang/fr/gklearn/preimage/random_preimage_generator.py
+++ b/lang/fr/gklearn/preimage/random_preimage_generator.py
@@ -26,43 +26,43 @@ class RandomPreimageGenerator(PreimageGenerator):
 	def __init__(self, dataset=None):
 		PreimageGenerator.__init__(self, dataset=dataset)
 		# arguments to set.
-		self.__k = 5 # number of nearest neighbors of phi in D_N.
-		self.__r_max = 10 # maximum number of iterations.
-		self.__l = 500 # numbers of graphs generated for each graph in D_k U {g_i_hat}.
-		self.__alphas = None # weights of linear combinations of points in kernel space.
-		self.__parallel = True
-		self.__n_jobs = multiprocessing.cpu_count()
-		self.__time_limit_in_sec = 0
-		self.__max_itrs = 20
+		self._k = 5 # number of nearest neighbors of phi in D_N.
+		self._r_max = 10 # maximum number of iterations.
+		self._l = 500 # number of graphs generated for each graph in D_k U {g_i_hat}.
+		self._alphas = None # weights of linear combinations of points in kernel space.
+		self._parallel = True
+		self._n_jobs = multiprocessing.cpu_count()
+		self._time_limit_in_sec = 0
+		self._max_itrs = 20
 		# values to compute.
-		self.__runtime_generate_preimage = None
-		self.__runtime_total = None
-		self.__preimage = None
-		self.__best_from_dataset = None
-		self.__k_dis_preimage = None
-		self.__k_dis_dataset = None
-		self.__itrs = 0
-		self.__converged = False # @todo
-		self.__num_updates = 0
+		self._runtime_generate_preimage = None
+		self._runtime_total = None
+		self._preimage = None
+		self._best_from_dataset = None
+		self._k_dis_preimage = None
+		self._k_dis_dataset = None
+		self._itrs = 0
+		self._converged = False # @todo
+		self._num_updates = 0
 		# values that can be set or to be computed.
-		self.__gram_matrix_unnorm = None
-		self.__runtime_precompute_gm = None
+		self._gram_matrix_unnorm = None
+		self._runtime_precompute_gm = None
 
 
 	def set_options(self, **kwargs):
 		self._kernel_options = kwargs.get('kernel_options', {})
 		self._graph_kernel = kwargs.get('graph_kernel', None)
 		self._verbose = kwargs.get('verbose', 2)
-		self.__k = kwargs.get('k', 5)
-		self.__r_max = kwargs.get('r_max', 10)
-		self.__l = kwargs.get('l', 500)
-		self.__alphas = kwargs.get('alphas', None)
-		self.__parallel = kwargs.get('parallel', True)
-		self.__n_jobs = kwargs.get('n_jobs', multiprocessing.cpu_count())
-		self.__time_limit_in_sec = kwargs.get('time_limit_in_sec', 0)
-		self.__max_itrs = kwargs.get('max_itrs', 20)
-		self.__gram_matrix_unnorm = kwargs.get('gram_matrix_unnorm', None)
-		self.__runtime_precompute_gm = kwargs.get('runtime_precompute_gm', None)
+		self._k = kwargs.get('k', 5)
+		self._r_max = kwargs.get('r_max', 10)
+		self._l = kwargs.get('l', 500)
+		self._alphas = kwargs.get('alphas', None)
+		self._parallel = kwargs.get('parallel', True)
+		self._n_jobs = kwargs.get('n_jobs', multiprocessing.cpu_count())
+		self._time_limit_in_sec = kwargs.get('time_limit_in_sec', 0)
+		self._max_itrs = kwargs.get('max_itrs', 20)
+		self._gram_matrix_unnorm = kwargs.get('gram_matrix_unnorm', None)
+		self._runtime_precompute_gm = kwargs.get('runtime_precompute_gm', None)
 
 
 	def run(self):
@@ -78,65 +78,65 @@ class RandomPreimageGenerator(PreimageGenerator):
 		start = time.time()
 
 		# 1. precompute gram matrix.
-		if self.__gram_matrix_unnorm is None:
+		if self._gram_matrix_unnorm is None:
 			gram_matrix, run_time = self._graph_kernel.compute(self._dataset.graphs, **self._kernel_options)
-			self.__gram_matrix_unnorm = self._graph_kernel.gram_matrix_unnorm
+			self._gram_matrix_unnorm = self._graph_kernel.gram_matrix_unnorm
 			end_precompute_gm = time.time()
-			self.__runtime_precompute_gm = end_precompute_gm - start
+			self._runtime_precompute_gm = end_precompute_gm - start
 		else:
-			if self.__runtime_precompute_gm is None:
+			if self._runtime_precompute_gm is None:
 				raise Exception('Parameter "runtime_precompute_gm" must be given when using pre-computed Gram matrix.')
-			self._graph_kernel.gram_matrix_unnorm = self.__gram_matrix_unnorm
+			self._graph_kernel.gram_matrix_unnorm = self._gram_matrix_unnorm
 			if self._kernel_options['normalize']:
-				self._graph_kernel.gram_matrix = self._graph_kernel.normalize_gm(np.copy(self.__gram_matrix_unnorm))
+				self._graph_kernel.gram_matrix = self._graph_kernel.normalize_gm(np.copy(self._gram_matrix_unnorm))
 			else:
-				self._graph_kernel.gram_matrix = np.copy(self.__gram_matrix_unnorm)
+				self._graph_kernel.gram_matrix = np.copy(self._gram_matrix_unnorm)
 			end_precompute_gm = time.time()
-			start -= self.__runtime_precompute_gm
+			start -= self._runtime_precompute_gm
 
 		# 2. compute k nearest neighbors of phi in D_N.
 		if self._verbose >= 2:
 			print('\nstart computing k nearest neighbors of phi in D_N...\n')
 		D_N = self._dataset.graphs
-		if self.__alphas is None:
-			self.__alphas = [1 / len(D_N)] * len(D_N)
+		if self._alphas is None:
+			self._alphas = [1 / len(D_N)] * len(D_N)
 		k_dis_list = [] # distance between g_star and each graph.
 		term3 = 0
-		for i1, a1 in enumerate(self.__alphas):
-			for i2, a2 in enumerate(self.__alphas):
+		for i1, a1 in enumerate(self._alphas):
+			for i2, a2 in enumerate(self._alphas):
 				term3 += a1 * a2 * self._graph_kernel.gram_matrix[i1, i2]
 		for idx in range(len(D_N)):
-			k_dis_list.append(compute_k_dis(idx, range(0, len(D_N)), self.__alphas, self._graph_kernel.gram_matrix, term3=term3, withterm3=True))
+			k_dis_list.append(compute_k_dis(idx, range(0, len(D_N)), self._alphas, self._graph_kernel.gram_matrix, term3=term3, withterm3=True))
 
 		# sort.
 		sort_idx = np.argsort(k_dis_list)
-		dis_gs = [k_dis_list[idis] for idis in sort_idx[0:self.__k]] # the k shortest distances.
+		dis_gs = [k_dis_list[idis] for idis in sort_idx[0:self._k]] # the k shortest distances.
 		nb_best = len(np.argwhere(dis_gs == dis_gs[0]).flatten().tolist())
 		g0hat_list = [D_N[idx].copy() for idx in sort_idx[0:nb_best]] # the nearest neighbors of phi in D_N
-		self.__best_from_dataset = g0hat_list[0] # get the first best graph if there are muitlple.
-		self.__k_dis_dataset = dis_gs[0]
+		self._best_from_dataset = g0hat_list[0] # get the first best graph if there are multiple.
+		self._k_dis_dataset = dis_gs[0]
 
-		if self.__k_dis_dataset == 0: # get the exact pre-image.
+		if self._k_dis_dataset == 0: # get the exact pre-image.
 			end_generate_preimage = time.time()
-			self.__runtime_generate_preimage = end_generate_preimage - end_precompute_gm
-			self.__runtime_total = end_generate_preimage - start
-			self.__preimage = self.__best_from_dataset.copy()
-			self.__k_dis_preimage = self.__k_dis_dataset
+			self._runtime_generate_preimage = end_generate_preimage - end_precompute_gm
+			self._runtime_total = end_generate_preimage - start
+			self._preimage = self._best_from_dataset.copy()
+			self._k_dis_preimage = self._k_dis_dataset
 			if self._verbose:
 				print()
 				print('=============================================================================')
 				print('The exact pre-image is found from the input dataset.')
 				print('-----------------------------------------------------------------------------')
-				print('Distance in kernel space for the best graph from dataset and for preimage:', self.__k_dis_dataset)
-				print('Time to pre-compute Gram matrix:', self.__runtime_precompute_gm)
-				print('Time to generate pre-images:', self.__runtime_generate_preimage)
-				print('Total time:', self.__runtime_total)
+				print('Distance in kernel space for the best graph from dataset and for preimage:', self._k_dis_dataset)
+				print('Time to pre-compute Gram matrix:', self._runtime_precompute_gm)
+				print('Time to generate pre-images:', self._runtime_generate_preimage)
+				print('Total time:', self._runtime_total)
 				print('=============================================================================')
 				print()
 			return
 
 		dhat = dis_gs[0] # the nearest distance
-		Gk = [D_N[ig].copy() for ig in sort_idx[0:self.__k]] # the k nearest neighbors
+		Gk = [D_N[ig].copy() for ig in sort_idx[0:self._k]] # the k nearest neighbors
 		Gs_nearest = [nx.convert_node_labels_to_integers(g) for g in Gk] # [g.copy() for g in Gk]
 
 		# 3. start iterations.
@@ -146,12 +146,12 @@ class RandomPreimageGenerator(PreimageGenerator):
 		dihat_list = []
 		r = 0
 		dis_of_each_itr = [dhat]
-		if self.__parallel:
+		if self._parallel:
 			self._kernel_options['parallel'] = None
-		self.__itrs = 0
-		self.__num_updates = 0
-		timer = Timer(self.__time_limit_in_sec)
-		while not self.__termination_criterion_met(timer, self.__itrs, r):
+		self._itrs = 0
+		self._num_updates = 0
+		timer = Timer(self._time_limit_in_sec)
+		while not self._termination_criterion_met(timer, self._itrs, r):
 			print('\n- r =', r)
 			found = False
 			dis_bests = dis_gs + dihat_list
@@ -173,7 +173,7 @@ class RandomPreimageGenerator(PreimageGenerator):
 				nb_modif = 1
 				for idx, nb in enumerate(range(nb_vpairs_min, nb_vpairs_min - fdgs_max, -1)):
 					nb_modif *= nb / (fdgs_max - idx)
-				while fdgs_max < nb_vpairs_min and nb_modif < self.__l:
+				while fdgs_max < nb_vpairs_min and nb_modif < self._l:
 					fdgs_max += 1
 					nb_modif *= (nb_vpairs_min - fdgs_max + 1) / fdgs_max
 				nb_increase = int(fdgs_max - fdgs_max_old)
@@ -184,7 +184,7 @@ class RandomPreimageGenerator(PreimageGenerator):
 			for ig, gs in enumerate(Gs_nearest + gihat_list):
 				if self._verbose >= 2:
 					print('-- computing', ig + 1, 'graphs out of', len(Gs_nearest) + len(gihat_list))
-				gnew, dhat, found = self.__generate_l_graphs(gs, fdgs_list[ig], dhat, ig, found, term3)
+				gnew, dhat, found = self._generate_l_graphs(gs, fdgs_list[ig], dhat, ig, found, term3)
 
 			if found:
 				r = 0
@@ -194,51 +194,51 @@ class RandomPreimageGenerator(PreimageGenerator):
 				r += 1
 
 			dis_of_each_itr.append(dhat)
-			self.__itrs += 1
+			self._itrs += 1
 			if self._verbose >= 2:
-				print('Total number of iterations is', self.__itrs, '.')
-				print('The preimage is updated', self.__num_updates, 'times.')
+				print('Total number of iterations is', self._itrs, '.')
+				print('The preimage is updated', self._num_updates, 'times.')
 				print('The shortest distances for previous iterations are', dis_of_each_itr, '.')
 
 		# get results and print.
 		end_generate_preimage = time.time()
-		self.__runtime_generate_preimage = end_generate_preimage - end_precompute_gm
-		self.__runtime_total = end_generate_preimage - start
-		self.__preimage = (g0hat_list[0] if len(gihat_list) == 0 else gihat_list[0])
-		self.__k_dis_preimage = dhat
+		self._runtime_generate_preimage = end_generate_preimage - end_precompute_gm
+		self._runtime_total = end_generate_preimage - start
+		self._preimage = (g0hat_list[0] if len(gihat_list) == 0 else gihat_list[0])
+		self._k_dis_preimage = dhat
 		if self._verbose:
 			print()
 			print('=============================================================================')
 			print('Finished generation of preimages.')
 			print('-----------------------------------------------------------------------------')
-			print('Distance in kernel space for the best graph from dataset:', self.__k_dis_dataset)
-			print('Distance in kernel space for the preimage:', self.__k_dis_preimage)
-			print('Total number of iterations for optimizing:', self.__itrs)
-			print('Total number of updating preimage:', self.__num_updates)
-			print('Time to pre-compute Gram matrix:', self.__runtime_precompute_gm)
-			print('Time to generate pre-images:', self.__runtime_generate_preimage)
-			print('Total time:', self.__runtime_total)
+			print('Distance in kernel space for the best graph from dataset:', self._k_dis_dataset)
+			print('Distance in kernel space for the preimage:', self._k_dis_preimage)
+			print('Total number of iterations for optimizing:', self._itrs)
+			print('Total number of updating preimage:', self._num_updates)
+			print('Time to pre-compute Gram matrix:', self._runtime_precompute_gm)
+			print('Time to generate pre-images:', self._runtime_generate_preimage)
+			print('Total time:', self._runtime_total)
 			print('=============================================================================')
 			print()
 
 
-	def __generate_l_graphs(self, g_init, fdgs, dhat, ig, found, term3):
-		if self.__parallel:
-			gnew, dhat, found = self.__generate_l_graphs_parallel(g_init, fdgs, dhat, ig, found, term3)
+	def _generate_l_graphs(self, g_init, fdgs, dhat, ig, found, term3):
+		if self._parallel:
+			gnew, dhat, found = self._generate_l_graphs_parallel(g_init, fdgs, dhat, ig, found, term3)
 		else:
-			gnew, dhat, found = self.__generate_l_graphs_series(g_init, fdgs, dhat, ig, found, term3)
+			gnew, dhat, found = self._generate_l_graphs_series(g_init, fdgs, dhat, ig, found, term3)
 		return gnew, dhat, found
 
 
-	def __generate_l_graphs_series(self, g_init, fdgs, dhat, ig, found, term3):
+	def _generate_l_graphs_series(self, g_init, fdgs, dhat, ig, found, term3):
 		gnew = None
 		updated = False
-		for trial in range(0, self.__l):
+		for trial in range(0, self._l):
 			if self._verbose >= 2:
-				print('---', trial + 1, 'trial out of', self.__l)
+				print('---', trial + 1, 'trial out of', self._l)
 
-			gtemp, dnew = self.__do_trial(g_init, fdgs, term3, trial)
+			gtemp, dnew = self._do_trial(g_init, fdgs, term3, trial)
 
 			# get the better graph preimage.
 			if dnew <= dhat: # @todo: the new distance is smaller or also equal?
@@ -257,14 +257,14 @@ class RandomPreimageGenerator(PreimageGenerator):
 					found = True # found better or equally good graph.
 		if updated:
-			self.__num_updates += 1
+			self._num_updates += 1
 
 		return gnew, dhat, found
 
 
-	def __generate_l_graphs_parallel(self, g_init, fdgs, dhat, ig, found, term3):
+	def _generate_l_graphs_parallel(self, g_init, fdgs, dhat, ig, found, term3):
 		gnew = None
-		len_itr = self.__l
+		len_itr = self._l
 		gnew_list = [None] * len_itr
 		dnew_list = [None] * len_itr
 		itr = range(0, len_itr)
@@ -295,7 +295,7 @@ class RandomPreimageGenerator(PreimageGenerator):
 					print('I am smaller!')
 					print('index (as in D_k U {gihat}) =', str(ig))
 					print('distance:', dhat, '->', dnew, '\n')
-				self.__num_updates += 1
+				self._num_updates += 1
 			else:
 				if self._verbose >= 2:
 					print('I am equal!')
@@ -308,11 +308,11 @@ class RandomPreimageGenerator(PreimageGenerator):
 	def _generate_graph_parallel(self, g_init, fdgs, term3, itr):
 		trial = itr
-		gtemp, dnew = self.__do_trial(g_init, fdgs, term3, trial)
+		gtemp, dnew = self._do_trial(g_init, fdgs, term3, trial)
 		return trial, gtemp, dnew
 
 
-	def __do_trial(self, g_init, fdgs, term3, trial):
+	def _do_trial(self, g_init, fdgs, term3, trial):
 		# add and delete edges.
 		gtemp = g_init.copy()
 		seed = (trial + int(time.time())) % (2 ** 32 - 1)
@@ -339,51 +339,51 @@ class RandomPreimageGenerator(PreimageGenerator):
 		kernels_to_gtmp, _ = self._graph_kernel.compute(gtemp, self._dataset.graphs, **self._kernel_options)
 		kernel_gtmp, _ = self._graph_kernel.compute(gtemp, gtemp, **self._kernel_options)
 		if self._kernel_options['normalize']:
-			kernels_to_gtmp = [kernels_to_gtmp[i] / np.sqrt(self.__gram_matrix_unnorm[i, i] * kernel_gtmp) for i in range(len(kernels_to_gtmp))] # normalize
+			kernels_to_gtmp = [kernels_to_gtmp[i] / np.sqrt(self._gram_matrix_unnorm[i, i] * kernel_gtmp) for i in range(len(kernels_to_gtmp))] # normalize
 			kernel_gtmp = 1 # @todo: not correct kernel value
 		gram_with_gtmp = np.concatenate((np.array([kernels_to_gtmp]), np.copy(self._graph_kernel.gram_matrix)), axis=0)
 		gram_with_gtmp = np.concatenate((np.array([[kernel_gtmp] + kernels_to_gtmp]).T, gram_with_gtmp), axis=1)
-		dnew = compute_k_dis(0, range(1, 1 + len(self._dataset.graphs)), self.__alphas, gram_with_gtmp, term3=term3, withterm3=True)
+		dnew = compute_k_dis(0, range(1, 1 + len(self._dataset.graphs)), self._alphas, gram_with_gtmp, term3=term3, withterm3=True)
 		return gtemp, dnew
 
 
 	def get_results(self):
 		results = {}
-		results['runtime_precompute_gm'] = self.__runtime_precompute_gm
-		results['runtime_generate_preimage'] = self.__runtime_generate_preimage
-		results['runtime_total'] = self.__runtime_total
-		results['k_dis_dataset'] = self.__k_dis_dataset
-		results['k_dis_preimage'] = self.__k_dis_preimage
-		results['itrs'] = self.__itrs
-		results['num_updates'] = self.__num_updates
+		results['runtime_precompute_gm'] = self._runtime_precompute_gm
+		results['runtime_generate_preimage'] = self._runtime_generate_preimage
+		results['runtime_total'] = self._runtime_total
+		results['k_dis_dataset'] = self._k_dis_dataset
+		results['k_dis_preimage'] = self._k_dis_preimage
+		results['itrs'] = self._itrs
+		results['num_updates'] = self._num_updates
 		return results
 
 
-	def __termination_criterion_met(self, timer, itr, r):
-		if timer.expired() or (itr >= self.__max_itrs if self.__max_itrs >= 0 else False):
-#			if self.__state == AlgorithmState.TERMINATED:
-#				self.__state = AlgorithmState.INITIALIZED
+	def _termination_criterion_met(self, timer, itr, r):
+		if timer.expired() or (itr >= self._max_itrs if self._max_itrs >= 0 else False):
+#			if self._state == AlgorithmState.TERMINATED:
+#				self._state = AlgorithmState.INITIALIZED
 			return True
-		return (r >= self.__r_max if self.__r_max >= 0 else False)
-#		return converged or (itrs_without_update > self.__max_itrs_without_update if self.__max_itrs_without_update >= 0 else False)
+		return (r >= self._r_max if self._r_max >= 0 else False)
+#		return converged or (itrs_without_update > self._max_itrs_without_update if self._max_itrs_without_update >= 0 else False)
 
 
 	@property
 	def preimage(self):
-		return self.__preimage
+		return self._preimage
 
 
 	@property
 	def best_from_dataset(self):
-		return self.__best_from_dataset
+		return self._best_from_dataset
 
 
 	@property
 	def gram_matrix_unnorm(self):
-		return self.__gram_matrix_unnorm
+		return self._gram_matrix_unnorm
 
 	@gram_matrix_unnorm.setter
 	def gram_matrix_unnorm(self, value):
-		self.__gram_matrix_unnorm = value
\ No newline at end of file
+		self._gram_matrix_unnorm = value
\ No newline at end of file
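
The substance of this patch is a rename of the class's name-mangled double-underscore attributes (for example self.__k) to single-underscore ones (self._k), which subclasses and helper code can reach without spelling out the mangled name. A minimal sketch of the difference, using hypothetical class names that are not part of the patch:

# Illustrative sketch only (not from the patch): Python rewrites a __name
# attribute inside a class body to _ClassName__name, so a single leading
# underscore is the conventional choice when derived classes need access.
class Base:
	def __init__(self):
		self.__k = 5   # stored as self._Base__k
		self._r = 10   # stored as self._r

class Child(Base):
	def show(self):
		print(self._r)        # 10: single-underscore attribute is inherited as-is
		print(self._Base__k)  # 5: the mangled name must be spelled out explicitly
		# print(self.__k)     # AttributeError: would look up self._Child__k

if __name__ == '__main__':
	Child().show()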
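In run(), term3 = sum over i, j of alphas[i] * alphas[j] * K[i, j] is computed once because it does not depend on the candidate graph, and the same value is passed to every compute_k_dis call (withterm3=True). A sketch of the kernel-space distance this corresponds to, assuming compute_k_dis evaluates the standard distance between phi(g_idx) and the weighted combination sum over i of alphas[i] * phi(g_i); the actual gklearn.preimage helper may differ in details:

# Hypothetical helper, not the gklearn implementation.
import numpy as np

def k_dis_sketch(idx, idx_range, alphas, gram, term3):
	# d^2 = k(g_idx, g_idx) - 2 * sum_i a_i * k(g_idx, g_i) + sum_ij a_i * a_j * k(g_i, g_j)
	term1 = gram[idx, idx]
	term2 = 2.0 * sum(a * gram[idx, j] for a, j in zip(alphas, idx_range))
	return np.sqrt(max(term1 - term2 + term3, 0.0))  # clip tiny negatives from rounding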