Skip to content

Commit 6f52f00

Browse files
address review comments
1 parent cef30a0 commit 6f52f00

File tree

2 files changed

+27
-25
lines changed

2 files changed

+27
-25
lines changed

perfmetrics/scripts/testing_on_gke/examples/dlio/parse_logs.py

Lines changed: 13 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
sys.path.append("../")
2424
from utils.utils import get_memory, get_cpu, standard_timestamp, is_mash_installed
2525

26-
LOCAL_LOGS_LOCATION = "../../bin/dlio-logs"
26+
_LOCAL_LOGS_LOCATION = "../../bin/dlio-logs"
2727

2828
record = {
2929
"pod_name": "",
@@ -49,13 +49,14 @@ def downloadDlioOutputs(dlioWorkloads):
4949
print(f"Downloading DLIO logs from the bucket {dlioWorkload.bucket}...")
5050
result = subprocess.run(
5151
[
52-
"gsutil",
53-
"-m", # download multiple files parallelly
54-
"-q", # download silently without any logs
52+
"gcloud",
53+
"-q", # ignore prompts
54+
"storage",
5555
"cp",
5656
"-r",
57+
"--no-user-output-enabled", # do not print names of files being copied
5758
f"gs://{dlioWorkload.bucket}/logs",
58-
LOCAL_LOGS_LOCATION,
59+
_LOCAL_LOGS_LOCATION,
5960
],
6061
capture_output=False,
6162
text=True,
@@ -68,11 +69,11 @@ def downloadDlioOutputs(dlioWorkloads):
6869
parser = argparse.ArgumentParser(
6970
prog="DLIO Unet3d test output parser",
7071
description=(
71-
"This program takes in a json test-config file and parses it for"
72-
" output buckets. From each output bucket, it downloads all the DLIO"
73-
" output logs from gs://<bucket>/logs/ localy to"
74-
f" {LOCAL_LOGS_LOCATION} and parses them for DLIO test runs and their"
75-
" output metrics."
72+
"This program takes in a json workload configuration file and parses"
73+
" it for valid DLIO workloads and the locations of their test outputs"
74+
" on GCS. It downloads each such output object locally to"
75+
f" {_LOCAL_LOGS_LOCATION} and parses them for DLIO test runs, and then"
76+
" dumps their output metrics into a CSV report file."
7677
),
7778
)
7879
parser.add_argument(
@@ -94,7 +95,7 @@ def downloadDlioOutputs(dlioWorkloads):
9495
args = parser.parse_args()
9596

9697
try:
97-
os.makedirs(LOCAL_LOGS_LOCATION)
98+
os.makedirs(_LOCAL_LOGS_LOCATION)
9899
except FileExistsError:
99100
pass
100101

@@ -119,7 +120,7 @@ def downloadDlioOutputs(dlioWorkloads):
119120
if not mash_installed:
120121
print("Mash is not installed, will skip parsing CPU and memory usage.")
121122

122-
for root, _, files in os.walk(LOCAL_LOGS_LOCATION):
123+
for root, _, files in os.walk(_LOCAL_LOGS_LOCATION):
123124
if files:
124125
print(f"Parsing directory {root} ...")
125126
per_epoch_stats_file = root + "/per_epoch_stats.json"

perfmetrics/scripts/testing_on_gke/examples/fio/parse_logs.py

Lines changed: 14 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
sys.path.append("../")
2424
from utils.utils import get_memory, get_cpu, unix_to_timestamp, is_mash_installed
2525

26-
LOCAL_LOGS_LOCATION = "../../bin/fio-logs"
26+
_LOCAL_LOGS_LOCATION = "../../bin/fio-logs"
2727

2828
record = {
2929
"pod_name": "",
@@ -49,20 +49,21 @@
4949
def downloadFioOutputs(fioWorkloads):
5050
for fioWorkload in fioWorkloads:
5151
try:
52-
os.makedirs(LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize)
52+
os.makedirs(_LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize)
5353
except FileExistsError:
5454
pass
5555

5656
print(f"Downloading FIO outputs from {fioWorkload.bucket}...")
5757
result = subprocess.run(
5858
[
59-
"gsutil",
60-
"-m", # download multiple files parallelly
61-
"-q", # download silently without any logs
59+
"gcloud",
60+
"-q", # ignore prompts
61+
"storage",
6262
"cp",
6363
"-r",
64+
"--no-user-output-enabled", # do not print names of objects being copied
6465
f"gs://{fioWorkload.bucket}/fio-output",
65-
LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize,
66+
_LOCAL_LOGS_LOCATION + "/" + fioWorkload.fileSize,
6667
],
6768
capture_output=False,
6869
text=True,
@@ -75,11 +76,11 @@ def downloadFioOutputs(fioWorkloads):
7576
parser = argparse.ArgumentParser(
7677
prog="FIO test output parser",
7778
description=(
78-
"This program takes in a json test-config file and parses it for"
79-
" output buckets. From each output bucket, it downloads all the FIO"
80-
" output logs from gs://<bucket>/logs/ locally to"
81-
f" {LOCAL_LOGS_LOCATION} and parses them for FIO test runs and their"
82-
" output metrics."
79+
"This program takes in a json workload configuration file and parses"
80+
" it for valid FIO workloads and the locations of their test outputs"
81+
" on GCS. It downloads each such output object locally to"
82+
f" {_LOCAL_LOGS_LOCATION} and parses them for FIO test runs, and then"
83+
" dumps their output metrics into a CSV report file."
8384
),
8485
)
8586
parser.add_argument(
@@ -101,7 +102,7 @@ def downloadFioOutputs(fioWorkloads):
101102
args = parser.parse_args()
102103

103104
try:
104-
os.makedirs(LOCAL_LOGS_LOCATION)
105+
os.makedirs(_LOCAL_LOGS_LOCATION)
105106
except FileExistsError:
106107
pass
107108

@@ -125,7 +126,7 @@ def downloadFioOutputs(fioWorkloads):
125126
if not mash_installed:
126127
print("Mash is not installed, will skip parsing CPU and memory usage.")
127128

128-
for root, _, files in os.walk(LOCAL_LOGS_LOCATION):
129+
for root, _, files in os.walk(_LOCAL_LOGS_LOCATION):
129130
for file in files:
130131
per_epoch_output = root + f"/{file}"
131132
if not per_epoch_output.endswith(".json"):

0 commit comments

Comments (0)