-
Notifications
You must be signed in to change notification settings - Fork 3
/
load_data.py
185 lines (151 loc) · 6.07 KB
/
load_data.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
import numpy as np
import base64
import io
import json
from h5 import HDFArchive
import tools.wannier90 as tb_w90
import tools.calc_akw as calc_akw
def load_config(contents, h5_filename, data):
    '''
    Decode an uploaded h5 config archive and populate `data` with its contents.

    Parameters
    ----------
    contents: str
        base64-encoded data-URL string ("<content_type>,<base64>") of the h5 file
    h5_filename: str
        original file name, stored under data['config_filename']
    data: dict
        dict to populate; mutated in place and also returned

    Returns
    -------
    data: dict
        input dict with 'tb_data' / 'sigma_data' filled where present in the
        archive, and an 'error' flag set accordingly
    '''
    data['config_filename'] = h5_filename
    content_type, content_string = contents.split(',')
    h5_bytestream = base64.b64decode(content_string)
    try:
        ar = HDFArchive(h5_bytestream)
    except Exception:
        # bug fix: previously a bare `except:` set the error flag but then fell
        # through and dereferenced the undefined `ar`, raising NameError before
        # the flag could be returned — bail out immediately instead
        print('error in loading file')
        data['error'] = True
        return data
    if 'tb_data' in ar:
        data['tb_data'] = ar['tb_data']
        # split complex arrays into real/imag parts stored as plain lists
        data['tb_data']['e_mat_re'] = data['tb_data']['e_mat'].real.tolist()
        data['tb_data']['e_mat_im'] = data['tb_data']['e_mat'].imag.tolist()
        del data['tb_data']['e_mat']
        data['tb_data']['eps_nuk'] = data['tb_data']['eps_nuk'].tolist()
        if 'e_vecs' in data['tb_data'].keys():
            # NOTE(review): unlike the sibling arrays above these are kept as
            # numpy arrays (no .tolist()) — confirm whether that is intentional
            data['tb_data']['evecs_re'] = data['tb_data']['e_vecs'].real
            data['tb_data']['evecs_im'] = data['tb_data']['e_vecs'].imag
            del data['tb_data']['e_vecs']
        # hopping keys (tuples) become strings so the dict is serializable
        data['tb_data']['hopping'] = {str(key): value.tolist() for key, value in data['tb_data']['hopping'].items()}
    if 'sigma_data' in ar:
        data['sigma_data'] = ar['sigma_data']
        data['sigma_data']['sigma_re'] = data['sigma_data']['sigma'].real.tolist()
        data['sigma_data']['sigma_im'] = data['sigma_data']['sigma'].imag.tolist()
        del data['sigma_data']['sigma']
        data['sigma_data']['w_dict']['w_mesh'] = data['sigma_data']['w_dict']['w_mesh'].tolist()
        data['sigma_data']['orbital_order'] = tuple(data['sigma_data']['orbital_order'])
    if 'sigma_data' not in ar and 'tb_data' not in ar:
        # archive opened fine but contains neither expected group
        print('error in loading file')
        data['error'] = True
    else:
        data['error'] = False
    return data
def load_pythTB_json(contents):
    '''
    read pythTB json file
    Parameters
    ----------
    contents: json string from memory
    Returns
    -------
    norb: int
        number of orbitals
    units: list of tuples
        vectors spanning unit cell
    hopping_dict: dict
        dict of hopping tuples
    '''
    def _decode(obj):
        '''
        json object hook: translate the encoder's complex-number and array
        marker dicts back into complex / numpy objects.
        '''
        if "complex" in obj:
            return complex(obj["real"], obj["imag"])
        if "array" in obj:
            return np.array(obj["list"])
        return obj
    _, encoded = contents.split(',')
    raw_bytes = base64.b64decode(encoded)
    model = json.loads(raw_bytes, object_hook = _decode)
    norb = model['_norb']
    # lattice vectors spanning the unit cell, one tuple per vector
    units = [tuple(vec) for vec in model['_lat']]
    origin = (0,) * len(units)
    # extract the hoppings
    # parsing is taken and adapted from triqs/lattice/utils.py TB_from_pythTB
    zero_mat = np.zeros((norb, norb), dtype=complex)
    # on-site energies form the diagonal of the R = 0 matrix
    hopping_dict = {origin: np.eye(norb, dtype=complex) * model['_site_energies']}
    for amp, orb_i, orb_j, vec in model['_hoppings']:
        R_plus = tuple(vec)
        R_minus = tuple(-np.array(vec))
        # create empty entries on first sight of this lattice vector;
        # per default pythTB does not explicitly store -R
        if R_plus not in hopping_dict:
            hopping_dict[R_plus] = zero_mat.copy()
            hopping_dict[R_minus] = zero_mat.copy()
        hopping_dict[R_plus][orb_i, orb_j] += amp
        # fill -R from +R using H_ij(+R) = [H_ji(-R)]*; if the user specified
        # -R explicitly both hopping matrices are summed, per the pythTB docs
        hopping_dict[R_minus][orb_j, orb_i] += np.conj(amp)
    return norb, units, hopping_dict
def load_w90_hr(contents):
    '''
    Decode an uploaded Wannier90 hr file and parse its hopping data.

    Parameters
    ----------
    contents: str
        base64-encoded data-URL string ("<content_type>,<base64>")

    Returns
    -------
    hopping, n_wf:
        parsed hopping data and orbital count, as returned by
        tb_w90.parse_hopping_from_wannier90_hr
    '''
    _, encoded = contents.split(',')
    hr_text = base64.b64decode(encoded).decode('utf-8')
    hopping, n_wf = tb_w90.parse_hopping_from_wannier90_hr(hr_text)
    return hopping, n_wf
def load_w90_wout(contents):
    '''
    Decode an uploaded Wannier90 wout file and parse its lattice vectors.

    Parameters
    ----------
    contents: str
        base64-encoded data-URL string ("<content_type>,<base64>")

    Returns
    -------
    units:
        lattice vectors, as returned by
        tb_w90.parse_lattice_vectors_from_wannier90_wout
    '''
    _, encoded = contents.split(',')
    wout_text = base64.b64decode(encoded).decode('utf-8')
    return tb_w90.parse_lattice_vectors_from_wannier90_wout(wout_text)
def load_sigma_h5(contents, filename, orbital_order=None):
    '''
    Load a self-energy from an uploaded h5 archive and interpolate it onto
    the stored frequency mesh via calc_akw.sigma_from_dmft.

    example to store a suitable sigma:
    with HDFArchive(path,'w') as h5:
        h5.create_group('self_energy')
        h5['self_energy']['Sigma'] = Sigma
        h5['self_energy']['w_mesh'] = getX(Sigma.mesh).tolist()
        h5['self_energy']['n_w'] = len(getX(Sigma.mesh).tolist())
        h5['self_energy']['n_orb'] = Sigma['up_0'].target_shape[0]
        h5['self_energy']['dc'] = dc[0]['up'][0,0]
        h5['self_energy']['dmft_mu'] = dmft_mu
        h5['self_energy']['orbital_order'] = (0,1,2)

    Parameters
    ----------
    contents: str
        base64-encoded data-URL string ("<content_type>,<base64>") of the h5 file
    filename: str
        original file name, stored under 'config_filename' in the result
    orbital_order: tuple, optional
        orbital ordering to use; when None (default) the order stored in the
        archive under 'self_energy/orbital_order' is used

    Returns
    -------
    data: dict
        sigma split into real/imag lists plus mesh and DMFT metadata
    '''
    data = {'config_filename': filename}
    content_type, content_string = contents.split(',')
    h5_bytestream = base64.b64decode(content_string)
    ar = HDFArchive(h5_bytestream)
    # extract from h5
    Sigma = ar['self_energy']['Sigma']
    # bug fix: the keyword argument used to be unconditionally overwritten by
    # the archive value, silently ignoring an explicit caller choice; only
    # fall back to the stored order when the caller passed None
    if orbital_order is None:
        orbital_order = ar['self_energy']['orbital_order']
    n_orb = ar['self_energy']['n_orb']
    dc = ar['self_energy']['dc']
    dmft_mu = ar['self_energy']['dmft_mu']
    w_mesh = ar['self_energy']['w_mesh']
    # setup w_dict: mesh, number of points, and [min, max] window
    w_dict = {'w_mesh': w_mesh,
              'n_w': ar['self_energy']['n_w'],
              'window': [w_mesh[0], w_mesh[-1]]}
    # TODO able to choose these
    spin = 'up'
    block = 0
    sigma_interpolated = calc_akw.sigma_from_dmft(n_orb, orbital_order, Sigma, spin, block, dc, w_dict)
    # split complex sigma into real/imag parts stored as plain lists
    data['sigma_re'] = sigma_interpolated.real.tolist()
    data['sigma_im'] = sigma_interpolated.imag.tolist()
    data['w_dict'] = w_dict
    data['dmft_mu'] = dmft_mu
    data['orbital_order'] = orbital_order
    data['n_orb'] = n_orb
    return data