# -*- encoding: utf-8 -*-
# This file contains the function "pol_hk", which performs the analysis of the housekeeping parameters of the Strip polarimeters.
# This function will be used during the system-level test campaign of the LSPE-Strip instrument.
# August 18th 2023, Brescia (Italy) - May 11th 2024, Brescia (Italy)
# Libraries & Modules
import csv
import logging
import time
from jinja2 import Environment, FileSystemLoader
from pathlib import Path
from rich.logging import RichHandler
# MyLibraries & MyModules
import polarimeter as pol
import f_correlation_strip as fz_c
# Use the logging module to produce nicely formatted messages on the shell
logging.basicConfig(level="INFO", format='%(message)s',
datefmt="[%X]", handlers=[RichHandler()])
def pol_hk(path_file: str, start_datetime: str, end_datetime: str, name_pol: str,
corr_plot: bool, corr_mat: bool, corr_t: float,
hk_sam_exp_med: dict, hk_sam_tolerance: dict,
output_plot_dir: str, output_report_dir: str,
report_to_plot: str):
"""
Performs only the analysis of the Housekeeping parameters of the polarimeter(s) provided.
Parameters:
- **path_file** (``str``): location of the data file, i.e. the location of the HDF5 files' index
- **start_datetime** (``str``): start time
- **end_datetime** (``str``): end time
- **name_pol** (``str``): name of the polarimeter. If more than one, list them in a single string separated by spaces.
Other Flags:
- **corr_plot** (``bool``): If true, compute the correlation plot of the HK.
- **corr_mat** (``bool``): If true, compute the correlation matrices of the HK.
- **corr_t** (``float``): upper threshold for the correlation value between two datasets: if exceeded, a warning is produced.
- **hk_sam_exp_med** (``dict``): contains the expected (median) sampling delta between two consecutive HK timestamps
- **hk_sam_tolerance** (``dict``): contains the accepted sampling tolerances of the HK parameters: I, V, O (an illustrative shape of these two dictionaries is sketched right after this docstring)
- **output_report_dir** (`str`): Path from striptease to the dir that contains the reports of the analysis.
- **output_plot_dir** (`str`): Path from striptease to the dir that contains the plots of the analysis.
- **report_to_plot** (`str`): Path from the report dir to the dir that contains the plots of the analysis.
"""
logging.info("Starting the Pipeline: Polarimeter Housekeeping Operation.")
# Starting chronometer
start_code_time = time.time()
logging.info('\nLoading dir and templates information...\n')
# REPORTS ----------------------------------------------------------------------------------------------------------
# [MD] Markdown REPORT ---------------------------------------------------------------------------------------------
# Initializing the data-dict for the report
report_data = {"output_plot_dir": output_plot_dir, "report_to_plot": report_to_plot}
# Initializing a boolean variable: True to create a new report file (overwriting old ones), False to append to it
first_report = True
# [CSV] REPORT -----------------------------------------------------------------------------------------------------
# HK information about the whole procedure is collected in a csv file
# csv_output_dir := directory that contains the csv reports
csv_output_dir = f"{output_report_dir}/CSV"
Path(csv_output_dir).mkdir(parents=True, exist_ok=True)
# Heading of the csv file
csv_general = [
["HOUSEKEEPING REPORT CSV"],
[""],
["Path dataset file", "Start Date Time", "End Date Time"],
[f"{path_file}", f"{start_datetime}", f"{end_datetime}"],
[""],
["N Polarimeters"],
[f"{len(name_pol.split())}"],
[""],
["Warnings List"],
[""],
[""]
]
# [CSV] Open and append information
with open(f'{csv_output_dir}/HK_Report_{start_datetime}__{end_datetime}.csv',
'a', newline='') as file:
writer = csv.writer(file)
writer.writerows(csv_general)
logging.info("####################\n"
"CSV Report updated: Heading written.\n####################\n")
# ------------------------------------------------------------------------------------------------------------------
# [MD] Initializing warning lists
t_warn = []
sampling_warn = []
corr_warn = []
# General warning lists used in case of repetitions
gen_warn = []
# root: location of the .txt template files with the information needed to build the report
root = "templates/validation_templates"
templates_dir = Path(root)
# Creating the Jinja2 environment
env = Environment(loader=FileSystemLoader(templates_dir))
# ------------------------------------------------------------------------------------------------------------------
logging.info('Ready to analyze the HouseKeeping Parameters.\n')
####################################################################################################################
# HOUSEKEEPING PARAMETERS - Single Polarimeter
####################################################################################################################
# Converting the string of polarimeters into a list
name_pol = name_pol.split()
# Repeating the analysis for all the polarimeters in the list
for np in name_pol:
# Messages for report and user
# --------------------------------------------------------------------------------------------------------------
msg = f'Parsing {np}'
# [CSV]
csv_general = [
[""],
[f"{msg}"],
[""]
]
# [CSV] write which polarimeter is parsed ---------------------------------------------------------
with open(f'{csv_output_dir}/HK_Report_{start_datetime}__{end_datetime}.csv',
'a', newline='') as file:
writer = csv.writer(file)
writer.writerows(csv_general)
# -------------------------------------------------------------------------------------------------
# [MD]
# Updating the report_data dict
report_data.update({"pol_name": np})
logging.warning(f'--------------------------------------------------------------------------------------'
f'\n{msg}\n')
# --------------------------------------------------------------------------------------------------------------
# Initializing a Polarimeter
p = pol.Polarimeter(name_pol=np, path_file=path_file,
start_datetime=start_datetime, end_datetime=end_datetime, output_plot_dir=output_plot_dir)
# Loading the HK
logging.warning('--------------------------------------------------------------------------------------'
f'\nHousekeeping Analysis of {np}.\nLoading HK.\n')
p.Load_HouseKeeping()
# HK Sampling warnings -------------------------------------------------------------------------------------
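# Note: HK_Sampling_Table is expected to return a dict with an "md" entry (markdown rows)
# and a "csv" entry (csv rows), which are used below to fill the two report formats.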
HK_sampling_table = p.HK_Sampling_Table(sam_exp_med=hk_sam_exp_med, sam_tolerance=hk_sam_tolerance)
# [MD] Storing HK sampling table
sampling_warn.extend(HK_sampling_table["md"])
# [CSV] Storing HK sampling table
csv_general = HK_sampling_table["csv"]
# ----------------------------------------------------------------------------------------------------------
# Normalizing the HK measures
logging.info(f'Polarimeter {np}: Normalizing HK.\n')
problematic_hk = p.Norm_HouseKeeping()
# HK Time warnings -----------------------------------------------------------------------------------------
# [MD] Storing problematic HK (time warnings)
t_warn.extend(p.warnings["time_warning"])
# [CSV] Storing problematic HK
csv_general.append(problematic_hk)
# ----------------------------------------------------------------------------------------------------------
# [CSV] REPORT: write HK sampling & time warnings in the report --------------------------------------------
with open(f'{csv_output_dir}/HK_Report_{start_datetime}__{end_datetime}.csv',
'a', newline='') as file:
writer = csv.writer(file)
writer.writerows(csv_general)
logging.info(f"####################\n"
f"CSV Report updated: HK {np} Sampling Table written.\n####################\n")
# ----------------------------------------------------------------------------------------------------------
# Analyzing HK and collecting the results
logging.info('Analyzing HK.\n')
hk_results = p.Analyse_HouseKeeping()
# Preparing tables for the report
logging.info(f'Polarimeter {np}: Producing HK table for the report.\n')
HK_table = p.HK_table(results=hk_results)
# [CSV] Storing the HK results table
csv_general = HK_table["csv"]
# [CSV] REPORT: write HK Table in the report ---------------------------------------------------------------
with open(f'{csv_output_dir}/HK_Report_{start_datetime}__{end_datetime}.csv',
'a', newline='') as file:
writer = csv.writer(file)
writer.writerows(csv_general)
logging.info(f"####################\n"
f"CSV Report updated: HK {np} Table written.\n####################\n")
# ----------------------------------------------------------------------------------------------------------
# Plots of the Bias HK (Voltages and Currents) and of the Offsets
logging.info(f'Polarimeter {np}: Plotting Bias HK and Offsets.\n')
for hk_kind in p.hk_list.keys():
p.Plot_Housekeeping(hk_kind=hk_kind, show=False)
# ----------------------------------------------------------------------------------------------------------
# [MD] REPORT HK
# ----------------------------------------------------------------------------------------------------------
logging.info(f"\nOnce ready, I will put the HK report into: {output_report_dir}.\n")
# Updating the report_data dict
report_data.update({"hk_table": HK_table["md"]})
# Getting instructions to create the HK report
template_hk = env.get_template('report_hk.txt')
# [MD] Report HK generation
filename = Path(f"{output_report_dir}/4_report_hk.md")
# Overwrite reports produced by previous runs of the pipeline
if first_report:
# Create a new blank file to write into
with open(filename, 'w') as outf:
outf.write(template_hk.render(report_data))
# Avoid overwriting when the following polarimeters are processed
else:
# Append at the end of the file
with open(filename, 'a') as outf:
outf.write(template_hk.render(report_data))
logging.info("###########################################################################################\n"
f"HK Parameters of Pol {np} - Markdown Report Ready!\n\n")
# --------------------------------------------------------------------------------------------------------------
################################################################################################################
# CORRELATION Plots and Matrices: HK
################################################################################################################
if corr_plot or corr_mat:
# List used to contain all possible data combinations to calculate correlations
possible_combos = []
# ----------------------------------------------------------------------------------------------------------
# HK Correlations
# Bias Currents I vs Bias Voltages V
# Bias Currents I Self Correlations
# Bias Voltages V Self Correlations
# ----------------------------------------------------------------------------------------------------------
# Collecting all possible combinations of HK correlations
# Note: the timestamps are the same for all I and V parameters, hence they are taken from the first HK parameter (ID0_HK / VD0_HK)
possible_combos.extend([
# I vs V
(p.hk["I"], p.hk_t["I"]["ID0_HK"], "Bias_I", "[µA]",
p.hk["V"], p.hk_t["V"]["VD0_HK"], "Bias_V", "[mV]"),
# I Self Correlations
(p.hk["I"], p.hk_t["I"]["ID0_HK"], "Bias_I", "[µA]",
{}, [], "Self_Corr", "[µA]"),
# V Self Correlations
(p.hk["V"], p.hk_t["V"]["VD0_HK"], "Bias_V", "[mV]",
{}, [], "Self_Corr", "[mV]")
])
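# Each tuple above packs (data dict, timestamps, label, unit) for the two series to correlate;
# an empty dict/list in the second slot marks a self-correlation case.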
# Produce all correlation plots and matrices using the combinations above
for d1, t1, n1, u1, d2, t2, n2, u2 in possible_combos:
# --------------------------------------------------------------------------------------------------
# Correlation Plot
# --------------------------------------------------------------------------------------------------
if corr_plot:
logging.warning(
f'---------------------------------------------------------------------------------'
f'\nPolarimeter {np}: Correlation plot with threshold {corr_t}. '
f'\n{n1} - {n2}.\n')
# Store correlation warnings from the correlation plot
correlation_warnings = fz_c.correlation_plot(list1=[], list2=[],
dict1=d1, dict2=d2,
time1=list(t1), time2=list(t2),
data_name1=f"{n1}", data_name2=f"{n2}",
measure_unit1=f"{u1}", measure_unit2=f"{u2}",
start_datetime=start_datetime,
show=False,
corr_t=corr_t,
plot_dir=output_plot_dir)
# [MD] Collecting correlation warnings
gen_warn = correlation_warnings["md"]
# [CSV] Collecting correlation warnings
csv_general = correlation_warnings["csv"]
# --------------------------------------------------------------------------------------------------
# Correlation Matrix
# --------------------------------------------------------------------------------------------------
if corr_mat:
logging.warning(
f'---------------------------------------------------------------------------------'
f'\nPolarimeter {np}: Correlation matrix with threshold {corr_t}. '
f'\n{n1} - {n2}.\n')
# Store/Overwrite correlation warnings from the correlation matrix
correlation_warnings = fz_c.correlation_mat(dict1=d1, dict2=d2,
data_name1=f"{n1}", data_name2=f"{n2}",
start_datetime=start_datetime,
show=False, plot_dir=output_plot_dir)
# [MD] Collecting correlation warnings
gen_warn = correlation_warnings["md"]
# [CSV] Collecting correlation warnings
csv_general = correlation_warnings["csv"]
# Store correlation warnings (only once, to avoid repetitions)
# [MD] Collecting correlation warnings
corr_warn.extend(gen_warn)
# [CSV] REPORT: write Correlation warnings in the report -------------------------------------------
with open(f'{csv_output_dir}/HK_Report_{start_datetime}__{end_datetime}.csv',
'a', newline='') as file:
writer = csv.writer(file)
writer.writerows(csv_general)
logging.info(f"####################\n"
f"CSV Report updated: {np} correlations {n1} - {n2}.\n####################\n")
# --------------------------------------------------------------------------------------------------
# Setting the bool first_report to False at the end of the first iteration: from now on the information is appended
first_report = False
# ------------------------------------------------------------------------------------------------------
# [MD] REPORT WARNINGS
# ------------------------------------------------------------------------------------------------------
# Updating the report_data dict for the warning report
report_data.update({"t_warn": t_warn,
"sampling_warn": sampling_warn,
"corr_warn": corr_warn
})
# Getting instructions to create the warnings report
template_w = env.get_template('report_warnings.txt')
# Report generation
filename = Path(f"{output_report_dir}/2_report_tot_warnings.md")
with open(filename, 'w') as outf:
outf.write(template_w.render(report_data))
# Stopping chronometer
end_code_time = time.time()
# Calculate running time of the code
elapsed_time = end_code_time - start_code_time
# Printing the elapsed time
logging.info(f"############################################################################################\n"
f"Elapsed Time: {round(elapsed_time, 2)} s ({(round(elapsed_time/60., 2))} min)\n"
"############################################################################################\n")
return
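# ----------------------------------------------------------------------------------------------------------------------
# Minimal stand-alone usage sketch. Assumption: in the real pipeline pol_hk is invoked by a wrapper script;
# every value below (polarimeter name, paths, dates, thresholds, dictionaries) is a placeholder, not a reference configuration.
if __name__ == "__main__":
    pol_hk(path_file="path/to/data_index",
           start_datetime="2023-08-18 00:00:00", end_datetime="2023-08-18 12:00:00",
           name_pol="R0",
           corr_plot=True, corr_mat=False, corr_t=0.4,
           hk_sam_exp_med={"I": 1.4, "V": 1.4, "O": 60.},
           hk_sam_tolerance={"I": 0.1, "V": 0.1, "O": 0.5},
           output_plot_dir="plot", output_report_dir="plot/reports",
           report_to_plot="..")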