From 8a5301961e2ed3418b3dd0aa048fc29dbed12ca2 Mon Sep 17 00:00:00 2001
From: Alexander Hampel
Date: Mon, 26 Feb 2024 12:30:03 -0500
Subject: [PATCH] [feat] improved standard behavior of the block structure

* previously the default gf_struct_solver had the keys up / down, inconsistent
  with the default behavior after analyse_block_structure was run: up_0 / down_0.
  Now the default solver structure always carries the _0 suffix in its keys.
* the old behavior resulted in an error when analyse_block_structure was called twice
* fixed the analyse_block_structure tests for the new behavior
* to use analyse_block_structure correctly, obtain its input via
  extract_G_loc(transform_to_solver_blocks=False)
* changed the density_matrix function to call extract_G_loc() directly when
  method='using_gf' is selected
* print a deprecation warning in density_matrix; the same result can be obtained
  via extract_G_loc and [G.density() for G in Gloc]
* new function density_matrix_using_point_integration()
* enforce in analyse_block_structure that the input dm or G is a list of length
  n_corr_shells
* corrected the docstring describing how include_shells are specified
* fixed the other tests accordingly
* fixed a small bug in the initial block structure regarding the length of lists
---
 python/triqs_dft_tools/sumk_dft.py            | 170 ++++++++++--------
 .../python/analyse_block_structure_from_gf.py | 131 +++++++-------
 .../analyse_block_structure_from_gf.ref.h5    | Bin 76008 -> 75233 bytes
 .../analyse_block_structure_from_gf2.py       |   4 +-
 test/python/dc_test.py                        |  27 ++-
 test/python/sumkdft_basic.py                  |  10 +-
 6 files changed, 185 insertions(+), 157 deletions(-)

diff --git a/python/triqs_dft_tools/sumk_dft.py b/python/triqs_dft_tools/sumk_dft.py
index 79933ba10..1e3ac27ab 100644
--- a/python/triqs_dft_tools/sumk_dft.py
+++ b/python/triqs_dft_tools/sumk_dft.py
@@ -168,8 +168,8 @@ def __init__(self, hdf_file, h_field=0.0, mesh=None, beta=40, n_iw=1025, use_dft
         # blocks possible
         self.gf_struct_sumk = [[(sp, self.corr_shells[icrsh]['dim']) for sp in self.spin_block_names[self.corr_shells[icrsh]['SO']]]
                                for icrsh in range(self.n_corr_shells)]
-        # First set a standard gf_struct solver:
-        self.gf_struct_solver = [dict([(sp, self.corr_shells[self.inequiv_to_corr[ish]]['dim'])
+        # First set a standard gf_struct solver (add _0 here for consistency with analyse_block_structure):
+        self.gf_struct_solver = [dict([(sp+'_0', self.corr_shells[self.inequiv_to_corr[ish]]['dim'])
                                        for sp in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]]['SO']]])
                                  for ish in range(self.n_inequiv_shells)]
         # Set standard (identity) maps from gf_struct_sumk <->
@@ -180,12 +180,12 @@ def __init__(self, hdf_file, h_field=0.0, mesh=None, beta=40, n_iw=1025, use_dft
                                      for ish in range(self.n_inequiv_shells)]
         for ish in range(self.n_inequiv_shells):
             for block, inner_dim in self.gf_struct_sumk[self.inequiv_to_corr[ish]]:
-                self.solver_to_sumk_block[ish][block] = block
+                self.solver_to_sumk_block[ish][block+'_0'] = block
                 for inner in range(inner_dim):
                     self.sumk_to_solver[ish][
-                        (block, inner)] = (block, inner)
+                        (block, inner)] = (block+'_0', inner)
                     self.solver_to_sumk[ish][
-                        (block, inner)] = (block, inner)
+                        (block+'_0', inner)] = (block, inner)
 
         # assume no shells are degenerate
         self.deg_shells = [[] for ish in range(self.n_inequiv_shells)]
@@ -862,8 +862,8 @@ def analyse_block_structure(self, threshold=0.00001, include_shells=None, dm=Non
             If the difference between density matrix / hloc elements is below threshold,
             they are considered to be equal.
         include_shells : list of integers, optional
-            List of correlated shells to be analysed.
- If include_shells is not provided all correlated shells will be analysed. + List of inequivalent shells to be analysed. + If include_shells is not provided all inequivalent shells will be analysed. dm : list of dict, optional List of density matrices from which block stuctures are to be analysed. Each density matrix is a dict {block names: 2d numpy arrays} for each correlated shell. @@ -874,14 +874,11 @@ def analyse_block_structure(self, threshold=0.00001, include_shells=None, dm=Non If not provided, it will be calculated using eff_atomic_levels. """ - self.gf_struct_solver = [{} for ish in range(self.n_inequiv_shells)] - self.sumk_to_solver = [{} for ish in range(self.n_inequiv_shells)] - self.solver_to_sumk = [{} for ish in range(self.n_inequiv_shells)] - self.solver_to_sumk_block = [{} - for ish in range(self.n_inequiv_shells)] - if dm is None: - dm = self.density_matrix(method='using_point_integration') + warn("WARNING: No density matrix given. Calculating density matrix with default parameters. This will be deprecated in future releases.") + dm = self.density_matrix(method='using_gf', transform_to_solver_blocks=False) + + assert len(dm) == self.n_corr_shells, "The number of density matrices must be equal to the number of correlated shells." dens_mat = [dm[self.inequiv_to_corr[ish]] for ish in range(self.n_inequiv_shells)] if hloc is None: @@ -890,8 +887,13 @@ def analyse_block_structure(self, threshold=0.00001, include_shells=None, dm=Non if include_shells is None: include_shells = list(range(self.n_inequiv_shells)) for ish in include_shells: + self.gf_struct_solver[ish] = {} + self.sumk_to_solver[ish] = {} + self.solver_to_sumk[ish] = {} + self.solver_to_sumk_block[ish] = {} for sp in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]]['SO']]: + assert sp in dens_mat[ish], f"The density matrix does not contain the block {sp}. Is the input dm given in sumk block structure?" n_orb = self.corr_shells[self.inequiv_to_corr[ish]]['dim'] # gives an index list of entries larger that threshold dmbool = (abs(dens_mat[ish][sp]) > threshold) @@ -1049,8 +1051,8 @@ def analyse_block_structure_from_gf(self, G, threshold=1.e-5, include_shells=Non If the difference between matrix elements is below threshold, they are considered to be equal. include_shells : list of integers, optional - List of correlated shells to be analysed. - If include_shells is not provided all correlated shells will be analysed. + List of inequivalent shells to be analysed. + If include_shells is not provided all inequivalent shells will be analysed. 
analyse_deg_shells : bool Whether to call the analyse_deg_shells function after having finished the block structure analysis @@ -1062,29 +1064,26 @@ def analyse_block_structure_from_gf(self, G, threshold=1.e-5, include_shells=Non """ assert isinstance(G, list), "G must be a list (with elements for each correlated shell)" + assert len(G) == self.n_corr_shells, "G must have one element for each correlated shell, run extract_G_loc with transform_to_solver_blocks=False to get the correct G" gf = self._get_hermitian_quantity_from_gf(G) - # initialize the variables - self.gf_struct_solver = [{} for ish in range(self.n_inequiv_shells)] - self.sumk_to_solver = [{} for ish in range(self.n_inequiv_shells)] - self.solver_to_sumk = [{} for ish in range(self.n_inequiv_shells)] - self.solver_to_sumk_block = [{} - for ish in range(self.n_inequiv_shells)] - - # the maximum value of each matrix element of each block and shell - max_gf = [{name:np.max(np.abs(g.data),0) for name, g in gf[ish]} for ish in range(self.n_inequiv_shells)] - if include_shells is None: # include all shells include_shells = list(range(self.n_inequiv_shells)) for ish in include_shells: + self.gf_struct_solver[ish] = {} + self.sumk_to_solver[ish] = {} + self.solver_to_sumk[ish] = {} + self.solver_to_sumk_block[ish] = {} for sp in self.spin_block_names[self.corr_shells[self.inequiv_to_corr[ish]]['SO']]: + assert sp in gf[self.inequiv_to_corr[ish]].indices, f"The Green's function does not contain the block {sp}. Is the input G given in sumk block structure?" n_orb = self.corr_shells[self.inequiv_to_corr[ish]]['dim'] # gives an index list of entries larger that threshold - maxgf_bool = (abs(max_gf[ish][sp]) > threshold) + max_gf = np.max(np.abs(gf[self.inequiv_to_corr[ish]][sp].data),0) + maxgf_bool = (max_gf > threshold) # Determine off-diagonal entries in upper triangular part of the # Green's function @@ -1455,21 +1454,12 @@ def calculate_diagonalization_matrix(self, prop_to_be_diagonal='eal', calc_in_so return trans - - def density_matrix(self, method='using_gf'): - """Calculate density matrices in one of two ways. - - Parameters - ---------- - method : string, optional - - - if 'using_gf': First get lattice gf (g_loc is not set up), then density matrix. - It is useful for Hubbard I, and very quick. - No assumption on the hopping structure is made (ie diagonal or not). - - if 'using_point_integration': Only works for diagonal hopping matrix (true in wien2k). - - beta : float, optional - Inverse temperature. + def density_matrix_using_point_integration(self): + """ + Calculate density matrices using point integration: Only works for + diagonal hopping matrix (true in wien2k). Consider using extract_G_loc + together with [G.density() for G in Gloc] instead. Returned density + matrix is always given in SumK block structure. 
Returns ------- @@ -1483,34 +1473,21 @@ def density_matrix(self, method='using_gf'): [self.corr_shells[icrsh]['dim'], self.corr_shells[icrsh]['dim']], complex) ikarray = np.array(list(range(self.n_k))) + ntoi = self.spin_names_to_ind[self.SO] + spn = self.spin_block_names[self.SO] for ik in mpi.slice_array(ikarray): - - if method == "using_gf": - - G_latt_iw = self.lattice_gf(ik=ik, mu=self.chemical_potential) - G_latt_iw *= self.bz_weights[ik] - dm = G_latt_iw.density() - MMat = [dm[sp] for sp in self.spin_block_names[self.SO]] - - elif method == "using_point_integration": - - ntoi = self.spin_names_to_ind[self.SO] - spn = self.spin_block_names[self.SO] - dims = {sp:self.n_orbitals[ik, ntoi[sp]] for sp in spn} - MMat = [np.zeros([dims[sp], dims[sp]], complex) for sp in spn] - - for isp, sp in enumerate(spn): - ind = ntoi[sp] - for inu in range(self.n_orbitals[ik, ind]): - # only works for diagonal hopping matrix (true in - # wien2k) - if (self.hopping[ik, ind, inu, inu] - self.h_field * (1 - 2 * isp)) < 0.0: - MMat[isp][inu, inu] = 1.0 - else: - MMat[isp][inu, inu] = 0.0 - - else: - raise ValueError("density_matrix: the method '%s' is not supported." % method) + dims = {sp:self.n_orbitals[ik, ntoi[sp]] for sp in spn} + MMat = [np.zeros([dims[sp], dims[sp]], complex) for sp in spn] + + for isp, sp in enumerate(spn): + ind = ntoi[sp] + for inu in range(self.n_orbitals[ik, ind]): + # only works for diagonal hopping matrix (true in + # wien2k) + if (self.hopping[ik, ind, inu, inu] - self.h_field * (1 - 2 * isp)) < 0.0: + MMat[isp][inu, inu] = 1.0 + else: + MMat[isp][inu, inu] = 0.0 for icrsh in range(self.n_corr_shells): for isp, sp in enumerate(self.spin_block_names[self.corr_shells[icrsh]['SO']]): @@ -1519,11 +1496,7 @@ def density_matrix(self, method='using_gf'): dim = self.corr_shells[icrsh]['dim'] n_orb = self.n_orbitals[ik, ind] projmat = self.proj_mat[ik, ind, icrsh, 0:dim, 0:n_orb] - if method == "using_gf": - dens_mat[icrsh][sp] += np.dot(np.dot(projmat, MMat[isp]), - projmat.transpose().conjugate()) - elif method == "using_point_integration": - dens_mat[icrsh][sp] += self.bz_weights[ik] * np.dot(np.dot(projmat, MMat[isp]), + dens_mat[icrsh][sp] += self.bz_weights[ik] * np.dot(np.dot(projmat, MMat[isp]), projmat.transpose().conjugate()) # get data from nodes: @@ -1546,6 +1519,55 @@ def density_matrix(self, method='using_gf'): return dens_mat + + def density_matrix(self, method='using_gf', mu=None, with_Sigma=True, with_dc=True, broadening=None, + transform_to_solver_blocks=True, show_warnings=True): + """Calculate density matrices in one of two ways. + + Parameters + ---------- + method : string, optional + + - if 'using_gf': First get lattice gf (g_loc is not set up), then density matrix. + It is useful for Hubbard I, and very quick. + No assumption on the hopping structure is made (ie diagonal or not). + - if 'using_point_integration': Only works for diagonal hopping matrix (true in wien2k). + mu : real, optional + Input chemical potential. If not provided the value of self.chemical_potential is used as mu. + with_Sigma : boolean, optional + If True then the local GF is calculated with the self-energy self.Sigma_imp. + with_dc : boolean, optional + If True then the double-counting correction is subtracted from the self-energy in calculating the GF. + broadening : float, optional + Imaginary shift for the axis along which the real-axis GF is calculated. + If not provided, broadening will be set to double of the distance between mesh points in 'mesh'. 
+            Only relevant for real-frequency GF.
+        transform_to_solver_blocks : bool, optional
+            If True (default), the returned G_loc will be transformed to the block structure ``gf_struct_solver``,
+            else it will be in ``gf_struct_sumk``.
+        show_warnings : bool, optional
+            Displays warning messages during transformation
+            (Only effective if transform_to_solver_blocks = True)
+
+        Returns
+        -------
+        dens_mat : list of dicts
+            Density matrix for each spin in each correlated shell.
+        """
+
+        if method == "using_gf":
+            warn("WARNING: density_matrix: method 'using_gf' is deprecated. Use 'extract_G_loc' instead.")
+            Gloc = self.extract_G_loc(mu, with_Sigma, with_dc, broadening,
+                                      transform_to_solver_blocks, show_warnings)
+            dens_mat = [G.density() for G in Gloc]
+        elif method == "using_point_integration":
+            warn("WARNING: density_matrix: method 'using_point_integration' is deprecated. Use 'density_matrix_using_point_integration' instead. All additionally provided arguments are ignored.")
+            dens_mat = self.density_matrix_using_point_integration()
+        else:
+            raise ValueError("density_matrix: the method '%s' is not supported." % method)
+
+        return dens_mat
+
     # For simple dft input, get crystal field splittings.
     def eff_atomic_levels(self):
         r"""
diff --git a/test/python/analyse_block_structure_from_gf.py b/test/python/analyse_block_structure_from_gf.py
index e9775ee6e..7551fc047 100644
--- a/test/python/analyse_block_structure_from_gf.py
+++ b/test/python/analyse_block_structure_from_gf.py
@@ -1,9 +1,9 @@
-from triqs.gf import *
+from triqs.gf import MeshImFreq, inverse, Omega
 from triqs_dft_tools.sumk_dft import SumkDFT
 from scipy.linalg import expm
 import numpy as np
 from triqs.utility.comparison_tests import assert_gfs_are_close, assert_arrays_are_close, assert_block_gfs_are_close
-from h5 import *
+from h5 import HDFArchive
 import itertools
 
 # The full test checks all different possible combinations of conjugated
@@ -19,11 +19,10 @@
 #######################################################################
 
 mesh = MeshImFreq(40, 'Fermion', 1025)
-SK = SumkDFT(hdf_file = 'SrIrO3_rot.h5', mesh=mesh)
+SK = SumkDFT(hdf_file='SrIrO3_rot.h5', mesh=mesh)
 Sigma = SK.block_structure.create_gf(mesh=mesh)
 SK.put_Sigma([Sigma])
-G = SK.extract_G_loc()
-
+G = SK.extract_G_loc(transform_to_solver_blocks=False)
 # the original block structure
 block_structure1 = SK.block_structure.copy()
 G_new = SK.analyse_block_structure_from_gf(G)
@@ -31,30 +30,29 @@
 # the new block structure
 block_structure2 = SK.block_structure.copy()
 
-with HDFArchive('analyse_block_structure_from_gf.out.h5','w') as ar:
+with HDFArchive('analyse_block_structure_from_gf.out.h5', 'w') as ar:
     ar['bs1'] = block_structure1
     ar['bs2'] = block_structure2
 
 # check whether the block structure is the same as in the reference
-with HDFArchive('analyse_block_structure_from_gf.out.h5','r') as ar,\
-     HDFArchive('analyse_block_structure_from_gf.ref.h5','r') as ar2:
+with HDFArchive('analyse_block_structure_from_gf.out.h5', 'r') as ar, HDFArchive('analyse_block_structure_from_gf.ref.h5', 'r') as ar2:
     assert ar['bs1'] == ar2['bs1'], 'bs1 not equal'
     a1 = ar['bs2']
     a2 = ar2['bs2']
-    assert a1==block_structure2, "writing/reading block structure incorrect"
+    assert a1 == block_structure2, 'writing/reading block structure incorrect'
     # we set the deg_shells to None because the transformation matrices
     # have a phase freedom and will, therefore, not be equal in general
     a1.deg_shells = None
     a2.deg_shells = None
-    assert a1==a2, 'bs2 not equal'
+    assert a1 == a2, 'bs2 not equal'
 
 #
check if deg shells are correct -assert len(SK.deg_shells[0])==1, "wrong number of equivalent groups" +assert len(SK.deg_shells[0]) == 1, 'wrong number of equivalent groups' # check if the Green's functions that are found to be equal in the # routine are indeed equal for d in SK.deg_shells[0]: - assert len(d)==2, "wrong number of shells in equivalent group" + assert len(d) == 2, 'wrong number of shells in equivalent group' # the convention is that for every degenerate shell, the transformation # matrix v and the conjugate bool is saved # then, @@ -70,8 +68,8 @@ normalized_gf << normalized_gf.transpose() normalized_gfs.append(normalized_gf) for i in range(len(normalized_gfs)): - for j in range(i+1,len(normalized_gfs)): - assert_arrays_are_close(normalized_gfs[i].data, normalized_gfs[j].data, 1.e-5) + for j in range(i + 1, len(normalized_gfs)): + assert_arrays_are_close(normalized_gfs[i].data, normalized_gfs[j].data, 1.0e-5) ####################################################################### # Second test # @@ -80,18 +78,21 @@ # model # ####################################################################### + # helper function to get random Hermitian matrix def get_random_hermitian(dim): - herm = np.random.rand(dim,dim)+1.0j*np.random.rand(dim,dim) + herm = np.random.rand(dim, dim) + 1.0j * np.random.rand(dim, dim) herm = herm + herm.conjugate().transpose() return herm + # helper function to get random unitary matrix def get_random_transformation(dim): herm = get_random_hermitian(dim) - T = expm(1.0j*herm) + T = expm(1.0j * herm) return T + # we will conjugate the Green's function blocks according to the entries # of conjugate_values # for each of the 5 blocks that will be constructed, there is an entry @@ -101,34 +102,34 @@ def get_random_transformation(dim): conjugate_values = list(itertools.product([False, True], repeat=5)) else: # in the quick test we check a random combination - conjugate_values = [np.random.rand(5)>0.5] + conjugate_values = [np.random.rand(5) > 0.5] for conjugate in conjugate_values: # construct a random block-diagonal Hloc - Hloc = np.zeros((10,10), dtype=complex) + Hloc = np.zeros((10, 10), dtype=complex) # the Hloc of the first three 2x2 blocks is equal Hloc0 = get_random_hermitian(2) - Hloc[:2,:2] = Hloc0 - Hloc[2:4,2:4] = Hloc0 - Hloc[4:6,4:6] = Hloc0 + Hloc[:2, :2] = Hloc0 + Hloc[2:4, 2:4] = Hloc0 + Hloc[4:6, 4:6] = Hloc0 # the Hloc of the last two 2x2 blocks is equal Hloc1 = get_random_hermitian(2) - Hloc[6:8,6:8] = Hloc1 - Hloc[8:,8:] = Hloc1 + Hloc[6:8, 6:8] = Hloc1 + Hloc[8:, 8:] = Hloc1 # construct the hybridization delta # this is equal for all 2x2 blocks - V = get_random_hermitian(2) # the hopping elements from impurity to bath - b1 = np.random.rand() # the bath energy of the first bath level - b2 = np.random.rand() # the bath energy of the second bath level - delta = G[0]['ud'][:2,:2].copy() - delta[0,0] << (V[0,0]*V[0,0].conjugate()*inverse(Omega-b1)+V[0,1]*V[0,1].conjugate()*inverse(Omega-b2))/2.0 - delta[0,1] << (V[0,0]*V[1,0].conjugate()*inverse(Omega-b1)+V[0,1]*V[1,1].conjugate()*inverse(Omega-b2))/2.0 - delta[1,0] << (V[1,0]*V[0,0].conjugate()*inverse(Omega-b1)+V[1,1]*V[0,1].conjugate()*inverse(Omega-b2))/2.0 - delta[1,1] << (V[1,0]*V[1,0].conjugate()*inverse(Omega-b1)+V[1,1]*V[1,1].conjugate()*inverse(Omega-b2))/2.0 + V = get_random_hermitian(2) # the hopping elements from impurity to bath + b1 = np.random.rand() # the bath energy of the first bath level + b2 = np.random.rand() # the bath energy of the second bath level + delta = G[0]['ud'][:2, 
:2].copy() + delta[0, 0] << (V[0, 0] * V[0, 0].conjugate() * inverse(Omega - b1) + V[0, 1] * V[0, 1].conjugate() * inverse(Omega - b2)) / 2.0 + delta[0, 1] << (V[0, 0] * V[1, 0].conjugate() * inverse(Omega - b1) + V[0, 1] * V[1, 1].conjugate() * inverse(Omega - b2)) / 2.0 + delta[1, 0] << (V[1, 0] * V[0, 0].conjugate() * inverse(Omega - b1) + V[1, 1] * V[0, 1].conjugate() * inverse(Omega - b2)) / 2.0 + delta[1, 1] << (V[1, 0] * V[1, 0].conjugate() * inverse(Omega - b1) + V[1, 1] * V[1, 1].conjugate() * inverse(Omega - b2)) / 2.0 # construct G G[0].zero() - for i in range(0,10,2): - G[0]['ud'][i:i+2,i:i+2] << inverse(Omega-delta) + for i in range(0, 10, 2): + G[0]['ud'][i : i + 2, i : i + 2] << inverse(Omega - delta) G[0]['ud'] << inverse(inverse(G[0]['ud']) - Hloc) # for testing symm_deg_gf below, we need this @@ -136,12 +137,12 @@ def get_random_transformation(dim): # mean of the blocks of G_noisy is equal to G[0] G_noisy = G[0].copy() noise1 = np.random.randn(*delta.target_shape) - G_noisy['ud'][:2,:2].data[:,:,:] += noise1 - G_noisy['ud'][2:4,2:4].data[:,:,:] -= noise1/2.0 - G_noisy['ud'][4:6,4:6].data[:,:,:] -= noise1/2.0 + G_noisy['ud'][:2, :2].data[:, :, :] += noise1 + G_noisy['ud'][2:4, 2:4].data[:, :, :] -= noise1 / 2.0 + G_noisy['ud'][4:6, 4:6].data[:, :, :] -= noise1 / 2.0 noise2 = np.random.randn(*delta.target_shape) - G_noisy['ud'][6:8,6:8].data[:,:,:] += noise2 - G_noisy['ud'][8:,8:].data[:,:,:] -= noise2 + G_noisy['ud'][6:8, 6:8].data[:, :, :] += noise2 + G_noisy['ud'][8:, 8:].data[:, :, :] -= noise2 # for testing backward-compatibility in symm_deg_gf, we need the # un-transformed Green's functions @@ -149,33 +150,35 @@ def get_random_transformation(dim): G_noisy_pre_transform = G_noisy.copy() # transform each block using a random transformation matrix - for i in range(0,10,2): + for i in range(0, 10, 2): T = get_random_transformation(2) - G[0]['ud'][i:i+2,i:i+2].from_L_G_R(T, G[0]['ud'][i:i+2,i:i+2], T.conjugate().transpose()) - G_noisy['ud'][i:i+2,i:i+2].from_L_G_R(T, G_noisy['ud'][i:i+2,i:i+2], T.conjugate().transpose()) + G[0]['ud'][i : i + 2, i : i + 2].from_L_G_R(T, G[0]['ud'][i : i + 2, i : i + 2], T.conjugate().transpose()) + G_noisy['ud'][i : i + 2, i : i + 2].from_L_G_R(T, G_noisy['ud'][i : i + 2, i : i + 2], T.conjugate().transpose()) # if that block shall be conjugated, go ahead and do it - if conjugate[i//2]: - G[0]['ud'][i:i+2,i:i+2] << G[0]['ud'][i:i+2,i:i+2].transpose() - G_noisy['ud'][i:i+2,i:i+2] << G_noisy['ud'][i:i+2,i:i+2].transpose() + if conjugate[i // 2]: + G[0]['ud'][i : i + 2, i : i + 2] << G[0]['ud'][i : i + 2, i : i + 2].transpose() + G_noisy['ud'][i : i + 2, i : i + 2] << G_noisy['ud'][i : i + 2, i : i + 2].transpose() # analyse the block structure - G_new = SK.analyse_block_structure_from_gf(G, 1.e-7) + G_new = SK.analyse_block_structure_from_gf(G, 1.0e-7) # transform G_noisy etc. 
to the new block structure - G_noisy = SK.block_structure.convert_gf(G_noisy, block_structure1, beta = G_noisy.mesh.beta, space_from='sumk') - G_pre_transform = SK.block_structure.convert_gf(G_pre_transform, block_structure1, beta = G_noisy.mesh.beta, space_from='sumk') - G_noisy_pre_transform = SK.block_structure.convert_gf(G_noisy_pre_transform, block_structure1, beta = G_noisy.mesh.beta, space_from='sumk') - - assert len(SK.deg_shells[0]) == 2, "wrong number of equivalent groups found" - assert sorted([len(d) for d in SK.deg_shells[0]]) == [2,3], "wrong number of members in the equivalent groups found" + G_noisy = SK.block_structure.convert_gf(G_noisy, block_structure1, beta=G_noisy.mesh.beta, space_from='sumk') + G_pre_transform = SK.block_structure.convert_gf(G_pre_transform, block_structure1, beta=G_noisy.mesh.beta, space_from='sumk') + G_noisy_pre_transform = SK.block_structure.convert_gf( + G_noisy_pre_transform, block_structure1, beta=G_noisy.mesh.beta, space_from='sumk' + ) + + assert len(SK.deg_shells[0]) == 2, 'wrong number of equivalent groups found' + assert sorted([len(d) for d in SK.deg_shells[0]]) == [2, 3], 'wrong number of members in the equivalent groups found' for d in SK.deg_shells[0]: - if len(d)==2: - assert 'ud_3' in d, "shell ud_3 missing" - assert 'ud_4' in d, "shell ud_4 missing" - if len(d)==3: - assert 'ud_0' in d, "shell ud_0 missing" - assert 'ud_1' in d, "shell ud_1 missing" - assert 'ud_2' in d, "shell ud_2 missing" + if len(d) == 2: + assert 'ud_3' in d, 'shell ud_3 missing' + assert 'ud_4' in d, 'shell ud_4 missing' + if len(d) == 3: + assert 'ud_0' in d, 'shell ud_0 missing' + assert 'ud_1' in d, 'shell ud_1 missing' + assert 'ud_2' in d, 'shell ud_2 missing' # the convention is that for every degenerate shell, the transformation # matrix v and the conjugate bool is saved @@ -192,21 +195,21 @@ def get_random_transformation(dim): normalized_gf << normalized_gf.transpose() normalized_gfs.append(normalized_gf) for i in range(len(normalized_gfs)): - for j in range(i+1,len(normalized_gfs)): + for j in range(i + 1, len(normalized_gfs)): # here, we use a threshold that is 1 order of magnitude less strict # because of numerics - assert_gfs_are_close(normalized_gfs[i], normalized_gfs[j], 1.e-6) + assert_gfs_are_close(normalized_gfs[i], normalized_gfs[j], 1.0e-6) # now we check symm_deg_gf # symmetrizing the GF has is has to leave it unchanged G_new_symm = G_new[0].copy() SK.symm_deg_gf(G_new_symm, 0) - assert_block_gfs_are_close(G_new[0], G_new_symm, 1.e-6) + assert_block_gfs_are_close(G_new[0], G_new_symm, 1.0e-6) # symmetrizing the noisy GF, which was carefully constructed, # has to give the same result as G_new[0] SK.symm_deg_gf(G_noisy, 0) - assert_block_gfs_are_close(G_new[0], G_noisy, 1.e-6) + assert_block_gfs_are_close(G_new[0], G_noisy, 1.0e-6) # check backward compatibility of symm_deg_gf # first, construct the old format of the deg shells @@ -217,9 +220,9 @@ def get_random_transformation(dim): # symmetrizing the GF as is has to leave it unchanged G_new_symm << G_pre_transform SK.symm_deg_gf(G_new_symm, 0) - assert_block_gfs_are_close(G_new_symm, G_pre_transform, 1.e-6) + assert_block_gfs_are_close(G_new_symm, G_pre_transform, 1.0e-6) # symmetrizing the noisy GF pre transform, which was carefully constructed, # has to give the same result as G_pre_transform SK.symm_deg_gf(G_noisy_pre_transform, 0) - assert_block_gfs_are_close(G_noisy_pre_transform, G_pre_transform, 1.e-6) + assert_block_gfs_are_close(G_noisy_pre_transform, G_pre_transform, 1.0e-6) diff 
--git a/test/python/analyse_block_structure_from_gf.ref.h5 b/test/python/analyse_block_structure_from_gf.ref.h5 index e046860512cd1a04b2918461f2c7fecf8a0cf7ba..d42fd419ffa6743c8a70531e426e75259f25c2cf 100644 GIT binary patch literal 75233 zcmeHQ4Rl<^b)J=sy|($0!GO$9kO9MzK#;+JNMcFGKgh(`GPa3Gf3&hJS*m4A){iSU zNh>FuA`__P{D7K3WA_A(ph+qqO%!sF?Y0CZ#@&&AT&q?#z7e&Ye5={q3w;vh0i*7t9dq7mbQ3qR2VKpJV)cZC=F5 z0*&%GA5f_fy-W`j&_P6~e?k;<{w$WS^R2yO=~5v%As;DM6NgWuBX93eYSUatpla!& zYBk0h9w_u_g!K&LU*9p`Ib7dyos&W@E5R_yG$IN;`dh^M+jBPwinBaF{T-W5jOagC zvq_#H0R0{0@%D4tw2BT)de(g>mB*Ri#5dCDMh4#^dj%Vlk-oZ1PM3XA>jNKUcF!o*<~fzo z%72=TZw^%WyWIF;(Z#bFWIT1|AM^pX|T-`cipL`W|6)1M|}J zo+b^AA<{QT7AH=-Qt&~)bG@!f>-IjQTHq5Oi>XSY)%;lqft zyYA{Y@Q$%uoCn~I^Mv_wOG|70rZt`IUG<$^?Q&*(oa^a!;}e{(+l{LY?ObD!^GSo8 zYYlQr4f*hwFg}c%!*(tn7&nJ4lI-dNLq9pZ;mU#h8x8%riSu=Q$~Yf(9sR!9!2c;t zKHB}XmXC39n8fWT@)0fpKCpY`200a+Kb=$E?p3BPckSMy)a9<-yCrqGYgZPhE_dz9 zlGNp{-CUZw+_jq=-qP)=Yd3FAUEj5vRjJEeySbdpfj7?EZCXCg&u2M5Xz^mm*$`f(QQr@v9|82X)Qe{8*QBB1Lp;*NU|FydiBe-Rgf9#>{KY}ofW z2eNv&y-e+U;`*VEP4lCh12J*#w~4Qk154mTT%4}2;3|{6Ux)h-JoNE!Gd9r>@b{-O zy>2(iXU=QUlKrY)tw$%9)UVr(BLHjY7DiW%32ETVw`_mCnyjC`pYvx_?!9Lx-?Tn|#|!F8oWO|-481N_$GUcSM?JNtN$HpzJe z{Pg4S5`w_$N6xE4$^n|}VJX`URLZu$*3I@8{@PK#@vKPe2l``ELcrf&%QUVT#&Jfe1T0b&wFh?t5{nPOZV*NWqVg~v8`#ge>ukM}Co*!Dnx9*3_4~3d; zY^?2Yo}#Ml=-RwVhy=Q|Wm`i#_&OY_wzJjYt6i@izfxrz8aCE;G&Qu~{Z5_jbz3?b zotqq+Tek>N-`d`;YBg_Z*xJ>+O?c%9p;~pMa%-5ExbFt-`}s%v`N0hDSwL-@<~joU z`U3r_!2>-E0Q@7|CIWq{%n%@dyE57PE1KAzV1=ZPCGTtdGVF17tI>Kp@qV{K4S&6L z%-85Ty(GWs;Pt94)OuyQ?htJtJ&F6MOX7Vxc(y*V3j7cKy<Ui0J-Fw`41X5dr9B zoE2_>2hrC;2PV5*`qxw*XMPi3(Su&T5`Iov-?ta@>IJ=T<9X5dd)$CcQX=5*M`U{a zWuTTCgry=X*-!0F*{_q8NqRMfpGg#=`VZ3g1p9~~9<7Nx#swpB69?tPj*ix)=R4I) z)jch^?{k(s&YqqPZ6qwhrDxg0W5nVi6=LGK;Zci+2ahp}hX;@GP~w68sP5+ku^(0M zIy3oWeSa(q-B88J-o`MnPyQ$HixL@kBO{^R`V?+Fj${Hb@+!7r~NaNPF- zR> z5)bn0r-~{%nPO}`9~ctMoAG)B181}G5knkjG;w!xK`|;A7<)NC)@)$l?0!yroU3f< zd4nv?o?exOQm=rm{(euF=J&?AG`}a#rTM*ZF3s@`m*zM5xir7I&!zcIeJ;&!9&~Ab z(;(s^L+EfwQop$PVZH(#4as}Q*1KTUkq-zU9_Yw9&q(zrfxu{=(s)w}t#cb=Piq;^Dz#z~bS-W60v+!DEDXAmLhH;792Dn;`s%D4 znxoM!KS!fo+WoE!^Sj@XVcPv}4Abs+aF}+#dxLn!5IP)^)U{KE)M0#wK2tlTo;|}p zM8$yo;@#1&QoFDRMA^Ujf&k)@gFIl^-MTj@!?ezAeVfYTT+hUJpwPP>;<-Jv3%x65 zIsNzW0Vk!L2KD@u-Y&eFA}mZ=yRa)&-fm5r!aN_^MO8<4Q<~{KSMS)yJpY6|PV^_{ zY4jD6_Ok*@FU3_6-Sh0+?%U;W>FU`YJtS@qYbBF1YFW3zJ<0h(tM&#R7V`d)n> z4g4tUH|+5sU!Q}1w()Z;eSevwK=};IZ=MZ&g9HFSA91!g#}Lr{Jgh_G>~~?fSSNz= zFKK>He4nIOo430Ehkn@9i$M0`Fh{>hEqN_NIG!V$SU38=?!`-|+|fOtm*h8HAcv2A z)5qnpzn)F@eH!Ag2w|^>m#klXr{syiK2`TaGTo;pz6arIIby$cd(T6u&^slevYpBT z0{XfQJ2t5Kxux7T{NbwO?2DgX7Jr8tK!1+2Ia@ts2w-?O*D4D^ve6@jA#SrHa9S)#GEC%1`uPuT7Az$EAaiuhtGfKcsSi z=8^a8Q=a{s`v~ZM2<+}bzK#L}lrE_s{DdkZGyZG3zI*h0Br?vD_5Q(6YUArL=m5T- zAydayLO(0PFv--^zmg#m6!(#r7#Fxk^1?6hn~(jElVF?&pJr@vjv=7Odt=)5#u8qw zt2O=4F&K_}IKlf#C48mVyHY0m-F?U1_iPBz{qNq#(zgQpbNFx4^W|G<@Ol*swO+wL z*6mUd{;OKZAz#10BMAA|N%;{`;P^CgR%Em{GDsO?Pf{90dpAf?YJZ`ANd5eQ_*2C1 zwK|k%|K>gdy8jEi+{XF|1WNe;e(&-6=YD!Q^mo)C`h$MhG(Ql~^%DECDBCNb5cIxG z_m2np`B~__Znx17n|cxO_hT}RPm3Q@OJ2*6>?iknJ9LsM-9H}qVfqeXbVWSc^xXMV zni%)EUJg5ZdRDZLP{o-b>QvV=_?P;9!$J6$H#&G1I{qx|aS`8v|JHvIK50a$T5tsc71pM~rO+O<}*lpCaX?`G}&ojYx#hqvS35S?wZ}J13k3LGjdA5IqWZ8N4 z(J{>>PSZMG#P%0sg`W}ZeVh}|Z|u?Lx4xc3KR3KcJ%Sy$kLmBdgr4SC8WGc+{WASg z@rsxOK!l&)#dR7}S|}(^Hmjt9zsXc77R(e2j`909PLsI`>w-cOI8&53bc9cUda^4b z|7~b!Y^m#PaLQB%-1mzW6h?pH^vJnnq`Kj3X_Dg{mBe+NfAu)n;B)B(La)~TDm`Dm zm8x|^o!4X9Z?+E*l^Tw^-r4=E4}iAVB481)2v`Ix0u}*_K*$j2j?~A+j;uN@hk^0=3|Ao2}=U(~6ZWg>ujM&3)HJjuK0;br_^I9w<=-5p} ze(lGZpKc7G%t1WD?>5x;eCisC`kC^x7lSBq+AS=YvW^Xd>Vo>Oy5w#}b^CyO4!-z` 
zQ(aV3BLAkIX?o1z@=>=!$^%Gwp5>=x%EQdl`RjoZak`i-Pfu-aee347mWJ-yS_h3e zGU%kRQ~$9(r~+0)C&jfprI9CY-6`d(qv(n1)v34c;NBfQW*Wq`(wk|>ssEaSN#FM3 zmAbOb{pOgr$Ye;|8SQ1altsWIU=gqgSOhEr76FTZMZh8uA_Tf4k-kXlhu7cM_}rYv z+TkNNJ^rhYJ=tFV+xl`GxCe{^FsAUBeH3f8(ajJGKqK(>C6AUElKI_%F-1 zJ@(Z%_r3qm-+1+ob6(gddRqG)ee}u-v0(35>u+w^xBI@IzWZcD?eN?OAK!Mil=ts1 zeW7My`|9C`H$FLbNyD!4i!R;u$n86}m0$b(yV@ix6Jt0#+N&W-+yo3k)QWh4?nVF#_W|Zca)2QuPu2y zvh5jB^rJT)_;vllifewk@!EUetavzn--|!L=!J?G8-Lw-`Ac=>M>gGkX6uhWUtT)$ zrGrm(_6+~+#s7Bx;7j+Gk45i4QX}bW|Ks5s?vk`P_UsEUuiU**#4bCu@{P8IU)i;3 z_xp<*%MX6{p10OM>Uf58+_@8>$ zJuT^hANK8BCFv_xJ^Nt<{4qxs1mezX7E#wVH8*4k78d~rHmCp9MEgg-m?VIa5n_IUCQQz9$u4*-JY1rD; zyiJoM<;qqasoWYu5ycu#enI%39Hk20v!DsFWX>XB3c|1E4})OfrF;<74V3lf_etarV`#rg$5smfbCX^Ql+An68U z2)gV4=!h;Y3o(BhwcHN|o8~kE(91Y0+yIYu=X7WwWvCCI)|O6uiq)n0Q>-q{pJH`s z{uHZA^QTz#2hV2l>V;ODd0y-s&uRFNdvN}KM5g=Ry}zT*!BP>G?5Fk~_ZWf{KzcRB zz3XK8`{|`)ANmB3*2Ep-f&(9zI4B=>^w5Xt`A+rH>9T{k$wNlivKU`^fEFK;_-U=li*pKRdP7wRi<&M2h{eq{Rv&qAXOCI$z?Yj?M zr1MRcyiNu6R~|<>brOM_OHDTqPTWjRVkYKyOJ7jrEdX4vU8ekKGmz4<36h9v(cNuy}azI53aazHqHC@FVp7O%Q&> zI*HF@e~JFU?Xq$ry;7rBX>_$luh!^0B#n4sBP5Uv01=8PvUNYB^Hd&r8o&#e6%?PtV6}fhF1VOAdu{9s$^(;}R^7qsy-U{gM4~u78v6 ypAVtKAxUAU4&TikzK7CG?bMmFI`$!-3ke3cn-&3!!0{oF=ufPXq7vz~lKy{JlVdOd literal 76008 zcmeHQ4RjpUbskAz7W22}ckP6*{FQ~!W^HU(lUiPgp^8Icu}u-CMB^V}85?_%EVIz$ zR6aC~;2eW6rO_d!h@?4HX?nW(D=G)u3ME9iO{!AT2tph2Q=)Ks4&tY^LQCkpz4x1) zx!TdpYG*xKX$GwO?tAy$``&%;-TVG#=ia7OSD!F{(R@ukk%)Gb7A*wv=Occ`5_N?r z;7HdwK&hJ6&3J!31?n{QpQgn)egX5BbnDh!b(LmNf%UR%zlmdrkTEaCo!YhMI8wAuz~Dc%pKLpo2tz-~3Y%j9 zc1HTi@nh|ZNHR*^ve+-vb~4Wx*_mTZJJZen5&b>S_%N3P{yT9xi2z-O@hlT0xCHq# z?Q|oIPn<>ZX{PJDz)si070h2k@r6l*5gazgoO&QPw}bHBEJ2-smhYed=rVsu_{3cV zb4-`L+fFy~Wv08A2|NPW8L6-7+L_E-9-xQa zgEo(GZcekk(Y;ju0rpwosj?vKCx7trlfSA5%CB@_#g#6sxYCIgSGuv{N=H^)`Cu!q z!d6^O5v{l~{Z{;V1Gnn;1d*=%iS&b@BKw1Pws9!@xhzN@6RRhI>}+YShM0J-9c0>wZmp>E=TZeSXC^KK;szw>f z;)l-~>}))|V&B3?-&x<-{^PfQxX8fYe0|3Y=NdQ+jH{(VA43LM5oz(PBMsh4<#o$9 z9ZR_oh4PWVbtEn__*(Ig3H;*%SI*T+{|SLBr)y1*3;b+>pCj;d4cw~V^8|jrfh+3= zOZ*!GS5DrVu4V~V{6c|WB=AoP{9=Lsroewo;GZ(^+I;G?qYFK_jf-E$9vr>8!PwtW z;i%!Pl)uunbm8yO2BzZc>+1`o5#G^js2f*`Hy)y=(ut&Q#{I7 zsVA?TO!$}@@W1^gJhh+G`gDvRHlS%uJSBykEaN?*9F@qNjd@{bq`sk9rp`)r^TR!A3T`~;UxbB!=ZXfT zoq->Y#qAoXmLHb;x78jWBWj6hJbK|T{IK(Vg9)|7G@fvLkRImRU#IbrV!H`_bZ({) z{A8u`ROyF@<97ZNjK@Sdh${RG4ZsgeeN|>>R&CwH=O|9|{j}P8G9JC~7j~xJalRi@ zTT{jpFZ?ymrFd(}uX!D_4Z#f60PIYj%h${>&i%ltpN;Un5uNFs=wi!>0CY^w_d<&E z{gm2rH6GY8o$)UAgOnQnl^TGZk@~94&J@pU{on7?)G?y**k?A@{o3dDJiMO#&`~B; zAKNv`JNYl>kGk4gpW?wb&xi|MoOxu_)_e^={{ctS#dVmomJJVTvKUW7E&V-*a-sfb8}>Ey0|I zE}oht>XlYUo>M$1|9}f!nt3GEk?9l<)=B*?bQ$ImgW{*)>1UGv_Or=Te{Zp66A!!SgQV_aj{U|yfDu!@^@wzs$J+S#&oN9!G(TX$=lTeq}y zw6$*E-a(AF+}hGnxYgX!v2*+G)^>=q#CP7lP17h)Pf(hL9~r!!B!aGOJ>lKIKi!hlc0Sk-sjnktKT_9{{?Kn~Tbp6&%hiCiAMo=7 zpSE*?{qW1r%kzhBzRn7}kmChRuPBGTR3QfYA@x;`{ZRM!p?7+ZV_j!_F580p+CWcm`M z<2@0P{}QAh!+b^hQlw+vB+{259r1u%{!5Y0uiMJ5j>J`^< zMSfGS{Q8#6Z|e07DxOR?^$I&D^}3Mr$#`|MT|J^aroM4qNu-!WVQt4HuR>%*_7$#k>5;)UFk@|*2-cIkApz0N6}ZnhVWiADL$_Tp<1lln0q|4fQk zH}Tbj@uS9FOR{J=a9lJgoi&bi+oW{X_=UvY&VO-{KOdJ)PG|m~bjr{Cu^ncUulRF| zV0&v)I=2Y6t8D42{+yqW?d;`K>1K;k0r7hPT-*?Z8VEHIY9Q1=sDV%ep$0+?gc_Jq z12~_|G8JxeVIQEuct2Bx7aD)mno|9vG+J2~-{++0;`^K#UixAsN&-sX4=V065Bx6C zIsFBuEW zZlnmOd!O1Zl&*6=C%W_oI~|j2*uRwHvz+})wHHP^-i0pC*%CL{^Vb^YfBUt0YCr$wW&@I&3rKEPKyteS zlG_uI+}?oX_IZ*Edz0h<&JUL`j_X7-^KUDlnLcM zpE2Hl0biZhk6of1Wr~^gBq|kJ~<9--fB16x`Z2 z5gqzd599gPsq@d)RHyxy#I00goh$CPAE<31!Z_0pY^Okm@es^z4ZvUIxtf?SMYtb0 z^|R<5ltJfwPIQ@0I~|jo`+;&Cmt*IS?0(>&r*(7DEn<#TB?_vC(8A5p7#sv 
zS}0;M4(9GG#+3>sIC8>;4t6n@E5)UD;xO;>za8VA+Oc}_lL5<52P{7muzcN#tc)`l_D%u7KtDc#=KIvh_jn_k!K}U(X+h{mAX+gHyYu6 z1n?i;Lz2huK?y=wN&~PTQeTzXk0Y_akoPn{k^Lmf_NtUF!Z>FQxcY&;lsBA*IO}jW zr#Sh67_Y-ocfZ&FuwNrzCJ7Vw60FRA73V)r`wJuYlN_A@e}~cez1x8Yh%m+U`G+Zh zbt6ZIV3ukC<3paqCK0rTNrKZ1wizhCQ`o z_2iEREPp&;`I@?JY&D#nn_ znB4eXhK9m#Gr%rHSx;?$QJTJ(uUhBNaZmcHUV9}2mY?<{U!KEsx!B)kG~7c7l!k2_ zo&1y1>4?p7An!tlew^faGxAH=%MlZQDpRV9SFaysJhdZg$LAjA@6|FMp2I*v(}ZsO zALEY_WAx`~iV3sTs0Oe;7-0j{#rAQ!>w_!R2W!?AzI@Z!Zrq)7>wnQcQk;EuQlrWY zi(R$`Fz;*ToH2&IY?r_g`)_z31N#7J5R?gN{&0->T~B>4F#zA@uM(c)a-dujJU&1- z{5a8#f1RMYZhXqlub=Uurzt*7KpP&m(@iiQ<9Tm}>Gc1$(?xzwc%m?SEq-S?-9RNn z`9@EqQ1`I}bIebhZ>JkOgYd{Af*^B)upd%i)3qN*;P=sZUJ|iY&AJ|Xf7`yBsecrI z*RhxPsce}nNR-3Pk6R+cpwT;NorHPyDK^g>QLTP7`e?<(yx0&&rC`XxMW<5FVjMP_UcE)JGkxDC!qCpJ!sZxovNIa{AE={XWDLaLeJ0C30{chUnL)-!|CUOC{+atO z;h^hdeDLp3(zyj;XQaN$u`^zNU$BSOTBjunp}Kl%fulLz3F12=3*swzPcf}6Jx*Kt zQ3GWxerJRj7QZu+<^7BymO#L|ANAaxhu6#fD2?rzEt3U7KN|avB{B?}`%zu2i^=;@ zw4)j`KCr1;K58t0sRu-~tFO3b?NvtHx-~Z_P)KlY*OcmRL_~qYU)ios_YYD}QYM}A zy7{-J0U|`ZbG8u75e+!`w^89QlB^HE{2T1D^iTfTMlDv{WHf$LAc<9^<>X z&hf;rzMkwb?)&uoJqgfR4uEyMX0!WlcC+)+PWSRS20sZgUdLI!OrNWG(LXL{JKBS( zP(dZfqaSF;8~;VLmX=LBZ#O;)o@=2WYF2jJ_}_Tag}=yLgmf;^RY+B)oT!oBVEBI( zuG|ki&rJ-@B+%Nglz(^f1L^%luM==RPuB;DkZQx3LNI4EApHRB%NUI+ySm$!P&69?nB|_+hvxQ)eXh4n+>^DXR?Fy*& zeq$fob@cZk#>WoY%MoIB4ZzMweN|>>-1i%!&rvyaE{>a>8Dsi{z4#$EM>HVq4E$}D z{SVZDc&x{kaKa@FmokK%aO+QU{SkSF7$AR=@r>|;(L;8+6yweR#08jR0Cq;|t1>%d zeLvUldr^mYN@Jh-%XJ3z54CGWPnPYJQ}be;lpJ!Rpm_Mveiynpvxz@HHNONW_qxzc z&82#&{@-hR!BhMBFZbnuBncT|9aQcxRm}Y$>YnZ?U|`I{$nQ!iiiJd`{$|Y;+#C{LWl9* zJUlhOpi=$!BOiEbKmX;{FCZO8Y^s#=9{=Ue4@mCG9_5zyU+EV~O*(7K&3~nSN`x3k zoGk=%L<7=)!A_?5{UoR%$pQE;tT)pjm?qq`v%S4#*Upx$J6iAP+`3!a+`6TuqpfxO z_6}mW<<^#t!rO)|9Xq%0Zf%DcOMK_;#y4juRdMPlfwyc_uPv&)Dz!kx+)xkLbMTiC z{#L%f<(I#88>hLSQHktUP?j>pXR8L}IEB3){Wp7!F;3C1o9xMDV{*T#?;&CV{(bCk z2Duz4SLRha-NY{mkNsEKMVJ*CU_Ye3j+FiA`*%_!+GkcMHLQeZ4M_U|KObkm0!FYO ze))N_AGm#?X+y$(jE=BmQ4W+Rgrou352>$m?1#E&1G`P<@9R2^D>XN^Z^#<))%v}o zMZBJ|TOPT7lYS=7_R6XK!ENk6hywMB{+#e6`yZ$Lvo+r7o>4#RL+6UqeAD@NLAzcf z!Z_2X#wd_sJOr~_1MnAlu4XgC?q5*Ash{=m{7z?jC%WNrA^;teo8RA-soeT=J-WP6BOq2I$#qyMKtP$rc7fw@CfQy&H7vMIyld}I@%IQ%^Y3dq z<)5wbPWvwdtPh15cL{xp-XZ0gBdxPP4tTYoUn6I2bvG`cE;%9!fCU z?Lr5;7>P{H&(3}3VVHMixoDH`&%_FzZlO_4)gtO2)tF{P#d|aqUHl!lw>-6D_2hpL zu>AQalS-%V$JMLlRmCmo6{8ATx4@-w&0jt+u zmwVDz_2jP(SpIrX@^SwL?ceQUf6=cZqCbgd)!H~B#otMj>A-Uw7;vHM20xxRBfo^b zh~Rk@&}zf#^`qN7wIk}MzWdseyx`(Ee+kya`Ae`SzU0C}78Acj;7cv|V!==0R(=Y% z@>95#pTe0R?9BiQTqDqJUt{7Fs~4}4N`=MrO#@gT$nQl~wmvBS-j!Sbi=R&Y5B}b_ ziiM17LIZN%7voa*v0XwXs{Om1ZQ}c>I`3a}iE^MkAtVjJen@>)W_P`Juiu!+FqL>yy^{NokDzALrOcez>dn)h@YIgglmAq}@(%?p|K)(?zvW5(%oz#ZN>aW4 z`hh2XRZsr>Q%D1t2@@vI->WBoktg}`9H_^|{x);7M$etvPU9RXecse`Ip#O$LWgsp zG|!tk+a%D-F_9|ItEAxSwc|ye+7abZ=Q_rz3!egM8smIwW8!>jW8!>jW8!>jW8%x` ZRLI0H75HTWPYHZEaP)=&6u3s<{{w#tJH-G1 diff --git a/test/python/analyse_block_structure_from_gf2.py b/test/python/analyse_block_structure_from_gf2.py index 074a9bf3c..135699511 100644 --- a/test/python/analyse_block_structure_from_gf2.py +++ b/test/python/analyse_block_structure_from_gf2.py @@ -48,7 +48,7 @@ def get_random_transformation(dim): SK = SumkDFT(hdf_file = 'SrIrO3_rot.h5', use_dft_blocks=False) -G_new = SK.analyse_block_structure_from_gf([G]) +G_new = SK.analyse_block_structure_from_gf([G]*2) G_new_symm = G_new[0].copy() SK.symm_deg_gf(G_new_symm, 0) assert_block_gfs_are_close(G_new[0], G_new_symm) @@ -93,7 +93,7 @@ def get_delta_from_mesh(mesh): tail, err = fit_tail(G['ud'], known_moments) Gt['ud'].set_from_fourier(G['ud'], tail) -G_new = 
SK.analyse_block_structure_from_gf([Gt]) +G_new = SK.analyse_block_structure_from_gf([Gt] * 2) G_new_symm = G_new[0].copy() SK.symm_deg_gf(G_new_symm, 0) assert_block_gfs_are_close(G_new[0], G_new_symm) diff --git a/test/python/dc_test.py b/test/python/dc_test.py index adf527e77..9d7839fa5 100755 --- a/test/python/dc_test.py +++ b/test/python/dc_test.py @@ -55,7 +55,7 @@ def test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename): dc_no = method_dict[method]["numbering_convention"] dc_string = method_dict[method]["new_convention"] - + mpi.report("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") mpi.report(f"\n Testing interface {method} \n") mpi.report("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") @@ -71,8 +71,8 @@ def test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename): mpi.report("Up DC matrix:") mpi.report(SK_new.dc_imp[0]['up']) mpi.report(f"Double counting energy = {SK_new.dc_energ} ") - - # Load previously computed DC values from h5 archive + + # Load previously computed DC values from h5 archive R = HDFArchive(f'./{filename}', 'r') dc_comp = R[f'DC_{method}_benchmark']['dc_imp'] en_comp = R[f'DC_{method}_benchmark']['dc_energ'] @@ -84,7 +84,7 @@ def test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename): assert np.allclose(SK_new.dc_imp[0]['up'], dc_comp, atol=1e-12), f"Assertion failed comparing Vdc to reference, method: {method} " assert np.allclose(SK_new.dc_energ, en_comp, atol=1e-12), f"Assertion failed comparing energy to reference, method: {method} " mpi.report("Comparison with stored DC values successfull!\n") - + @@ -104,20 +104,20 @@ def test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename): icrsh = 0 -dens = SK_compat.density_matrix() +dens = SK_compat.density_matrix(transform_to_solver_blocks=True) with np.printoptions(precision=5): for key in dens[0].keys(): mpi.report(f"{key} channel") mpi.report(dens[0][key].real) -N_up = np.trace(dens[0]['up'].real) -N_down = np.trace(dens[0]['down'].real) +N_up = np.trace(dens[0]['up_0'].real) +N_down = np.trace(dens[0]['down_0'].real) N_tot = N_up + N_down mpi.report(f"{N_up=} ,{N_down=}, {N_tot=}\n") -for method in ["FLL", "AMF", "Held"]: +for method in ["FLL", "AMF", "Held"]: test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename = f"{dft_filename}.h5") #in case implementation changes, to write new testing data into archive @@ -141,16 +141,15 @@ def test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename): SK_new = SumkDFT(hdf_file=dft_filename+'.h5',use_dft_blocks=use_blocks) icrsh = 0 -dens = SK_compat.density_matrix() - +dens = SK_compat.density_matrix(transform_to_solver_blocks=True) with np.printoptions(precision=5): for key in dens[0].keys(): mpi.report(f"{key} channel") mpi.report(dens[0][key].real) -N_up = np.trace(dens[0]['up'].real) -N_down = np.trace(dens[0]['down'].real) +N_up = np.trace(dens[0]['up_0'].real) +N_down = np.trace(dens[0]['down_0'].real) N_tot = N_up + N_down mpi.report(f"{N_up=} ,{N_down=}, {N_tot=}\n") @@ -158,9 +157,9 @@ def test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename): Uval = 5 Jval = 0.3 -for method in ["FLL", "AMF", "Held"]: +for method in ["FLL", "AMF", "Held"]: test_dc(SK_compat, SK_new, method, method_dict, dens, Uval, Jval, filename = f"{dft_filename}.h5" ) - + #in case implementation changes, to write new testing data into archive #R = HDFArchive(f'./{dft_filename}.h5', 'a') #R.create_group(f'DC_{method}_benchmark') diff --git a/test/python/sumkdft_basic.py 
b/test/python/sumkdft_basic.py
index 5f6d8f39f..b5345e302 100644
--- a/test/python/sumkdft_basic.py
+++ b/test/python/sumkdft_basic.py
@@ -20,16 +20,20 @@
 #
 ################################################################################
-from h5 import *
+from h5 import HDFArchive
 from triqs_dft_tools.sumk_dft_tools import SumkDFTTools
 import triqs.utility.mpi as mpi
 from triqs.utility.comparison_tests import *
 from triqs.utility.h5diff import h5diff
-
+import numpy as np
 
 SK = SumkDFTTools(hdf_file = 'SrVO3.ref.h5')
 
-dm = SK.density_matrix(method = 'using_gf')
+dm = SK.density_matrix(method = 'using_gf', transform_to_solver_blocks=False, with_Sigma=False)
 dm_pc = SK.partial_charges(with_Sigma=False, with_dc=False)
+dm_pi = SK.density_matrix_using_point_integration()
+
+for key, value in dm[0].items():
+    assert (np.allclose(value, dm_pi[0][key], atol=1e-6, rtol=1e-6))
 
 with HDFArchive('sumkdft_basic.out.h5','w') as ar:
     ar['dm'] = dm
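
Note on usage: the snippet below is a minimal sketch of the workflow this patch moves users to,
condensed from the updated analyse_block_structure_from_gf test above. 'SrIrO3_rot.h5' is the
archive used in that test (substitute your own DFT input archive), and passing a single Sigma to
put_Sigma assumes one inequivalent shell, as in that test.

    from triqs.gf import MeshImFreq
    from triqs_dft_tools.sumk_dft import SumkDFT

    # set up SumkDFT on an imaginary-frequency mesh with a zero self-energy
    mesh = MeshImFreq(40, 'Fermion', 1025)
    SK = SumkDFT(hdf_file='SrIrO3_rot.h5', mesh=mesh)
    Sigma = SK.block_structure.create_gf(mesh=mesh)
    SK.put_Sigma([Sigma])

    # keep G_loc in the sumk block structure (one block Gf per correlated shell);
    # this is what analyse_block_structure_from_gf now expects as input
    Gloc = SK.extract_G_loc(transform_to_solver_blocks=False)
    G_new = SK.analyse_block_structure_from_gf(Gloc)

    # density matrices without the deprecated density_matrix(method='using_gf')
    dens_mat = [G.density() for G in Gloc]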