 sys.path.append("../")
 from utils.utils import get_memory, get_cpu, unix_to_timestamp, is_mash_installed
 
-LOCAL_LOGS_LOCATION = "../../bin/fio-logs"
+_LOCAL_LOGS_LOCATION = "../../bin/fio-logs"
 
 record = {
     "pod_name": "",
@@ -49,20 +49,21 @@
 def downloadFioOutputs(fioWorkloads):
   for fioWorkload in fioWorkloads:
     try:
-      os.makedirs(LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize)
+      os.makedirs(_LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize)
     except FileExistsError:
       pass
 
     print(f"Downloading FIO outputs from {fioWorkload.bucket}...")
     result = subprocess.run(
         [
-            "gsutil",
-            "-m",  # download multiple files parallelly
-            "-q",  # download silently without any logs
+            "gcloud",
+            "-q",  # ignore prompts
+            "storage",
             "cp",
             "-r",
+            "--no-user-output-enabled",  # do not print names of objects being copied
             f"gs://{fioWorkload.bucket}/fio-output",
-            LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize,
+            _LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize,
         ],
         capture_output=False,
         text=True,
@@ -75,11 +76,11 @@ def downloadFioOutputs(fioWorkloads):
 parser = argparse.ArgumentParser(
     prog="DLIO Unet3d test output parser",
     description=(
-        "This program takes in a json test-config file and parses it for "
-        " output buckets. From each output bucket, it downloads all the FIO "
-        " output logs from gs://<bucket>/logs/ locally to"
-        f" {LOCAL_LOGS_LOCATION} and parses them for FIO test runs and their "
-        " output metrics."
+        "This program takes in a json workload configuration file and parses"
+        " it for valid FIO workloads and the locations of their test outputs"
+        " on GCS. It downloads each such output object locally to"
+        f" {_LOCAL_LOGS_LOCATION} and parses them for FIO test runs, and then"
+        " dumps their output metrics into a CSV report file."
     ),
 )
 parser.add_argument(
@@ -101,7 +102,7 @@ def downloadFioOutputs(fioWorkloads):
 args = parser.parse_args()
 
 try:
-  os.makedirs(LOCAL_LOGS_LOCATION)
+  os.makedirs(_LOCAL_LOGS_LOCATION)
 except FileExistsError:
   pass
 
@@ -125,7 +126,7 @@ def downloadFioOutputs(fioWorkloads):
 if not mash_installed:
   print("Mash is not installed, will skip parsing CPU and memory usage.")
 
-for root, _, files in os.walk(LOCAL_LOGS_LOCATION):
+for root, _, files in os.walk(_LOCAL_LOGS_LOCATION):
   for file in files:
     per_epoch_output = root + f"/{file}"
     if not per_epoch_output.endswith(".json"):
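
Read in isolation, the new copy invocation amounts to the standalone call sketched below; the bucket name and destination directory are placeholders here, not values from any real workload configuration.

import subprocess

# Minimal sketch of the "gcloud storage cp" invocation introduced in the diff.
# "example-bucket" and "/tmp/fio-logs/64K" are placeholders; the real script
# derives them from the parsed FIO workload configuration.
subprocess.run(
    [
        "gcloud",
        "-q",  # suppress interactive prompts
        "storage",
        "cp",
        "-r",  # copy the output directory recursively
        "--no-user-output-enabled",  # do not print names of objects being copied
        "gs://example-bucket/fio-output",
        "/tmp/fio-logs/64K",
    ],
    capture_output=False,
    text=True,
)

Note that -q and --no-user-output-enabled are gcloud-wide flags (prompt suppression and per-object output suppression respectively), and gcloud storage parallelizes transfers by default, so there is no direct replacement needed for gsutil's -m.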