temp fix for 50000 token limit in gsheet for larger files
ankitaluthra1 committed Nov 30, 2024
1 parent 2839d49 commit b825474
Showing 1 changed file with 9 additions and 3 deletions.
tools/log_analyser/outputgenerator/generate_gsheet.py (9 additions & 3 deletions)

@@ -126,7 +126,7 @@ def get_pattern_rows(handle, data, obj_bytes_list, obj_ranges_list, obj_pattern,
     :param op:read/write
     """
     if len(obj_pattern) > 0:
-        data.append([handle, op, "first " + op, 1, obj_bytes_list[0], obj_bytes_list[0], json.dumps([obj_bytes_list[0]]), json.dumps([obj_ranges_list[0]])])
+        data.append([handle, op, "first " + op, 1, "", "", json.dumps([]), json.dumps([])])
         # data.append([handle, op, "first " + op, 1, obj_bytes_list[0], obj_bytes_list[0]])
     if len(obj_pattern) > 1:
         read_ranges = [obj_ranges_list[1]]
@@ -140,7 +140,7 @@ def get_pattern_rows(handle, data, obj_bytes_list, obj_ranges_list, obj_pattern,
             json_read_bytes = json.dumps(read_bytes)
             json_read_ranges = json.dumps(read_ranges)
             avg_byte_size = np.mean(obj_bytes_list)
-            row = [handle, op, type_map[last_read], streak, avg_byte_size, byte_sum, json_read_bytes, json_read_ranges]
+            row = [handle, op, type_map[last_read], streak, avg_byte_size, byte_sum, "", ""]
             # row = [handle, op, type_map[last_read], streak, avg_byte_size, byte_sum]
             data.append(row)
             last_read = obj_pattern[i]
@@ -158,7 +158,7 @@ def get_pattern_rows(handle, data, obj_bytes_list, obj_ranges_list, obj_pattern,
         json_read_bytes = json.dumps(read_bytes)
         json_read_ranges = json.dumps(read_ranges)
         avg_byte_size = np.mean(obj_bytes_list)
-        row = [handle, op, type_map[last_read], streak, avg_byte_size, byte_sum, json_read_bytes, json_read_ranges]
+        row = [handle, op, type_map[last_read], streak, avg_byte_size, byte_sum, "", ""]
         # row = [handle, op, type_map[last_read], streak, avg_byte_size, byte_sum]
         data.append(row)
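
The three `get_pattern_rows` hunks above all make the same change: they stop writing the `json.dumps`-serialized byte and range lists into the sheet, because for large files those strings can exceed Google Sheets' 50,000-character-per-cell limit and fail the whole `append_rows` call. A minimal sketch of a less lossy alternative, using a hypothetical `clip_for_cell` helper that is not part of this commit:

    import json

    # Google Sheets rejects cell values longer than 50,000 characters.
    GSHEET_CELL_CHAR_LIMIT = 50000

    def clip_for_cell(value, limit=GSHEET_CELL_CHAR_LIMIT):
        """Serialize value to JSON, truncated so the result fits in one cell.

        Hypothetical helper: instead of blanking the json_read_bytes and
        json_read_ranges columns outright, keep as much of the payload as
        the per-cell limit allows.
        """
        text = json.dumps(value)
        if len(text) <= limit:
            return text
        return text[: limit - 3] + "..."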

@@ -179,6 +179,7 @@ def read_pattern_writer(global_data, worksheet):
         obj = global_data.name_object_map[name]
         get_pattern_rows(name, data, obj.read_bytes, obj.read_ranges, obj.read_pattern, "read")
     if len(data) > 0:
+        print(data)
         worksheet.append_rows(data)


@@ -228,17 +229,22 @@ def main_gsheet_generator(global_data):
     except gspread.exceptions.WorksheetNotFound:
         pass
     worksheet1.clear()
+    print("starting worksheet")
     worksheet1.append_rows(call_data)
     calls_data_writer(global_data, global_data.gcalls.gcs_calls, "GCS", "global", worksheet1)
     calls_data_writer(global_data, global_data.gcalls.kernel_calls, "kernel", "global", worksheet1)
+    print("writing worksheet 1")
     worksheet2.clear()
     worksheet2.append_rows(handle_data)
     handle_data_writer(global_data, worksheet2)
+    print("writing worksheet 2")
     worksheet3.clear()
     worksheet3.append_rows(pattern_data)
+    print("writing worksheet 3")
     read_pattern_writer(global_data, worksheet3)
     worksheet4.clear()
     worksheet4.append_rows(max_entry_data)
+    print("writing worksheet 4")
     max_entry_writer(global_data, worksheet4)
     worksheet5.clear()
     worksheet5.append_row(["message"])
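
The added `print` calls bracket each worksheet write, so when the 50,000-character error surfaces it is clear which sheet and which data set triggered it. In the same spirit, a hedged sketch of a hypothetical chunked writer (assuming a gspread `Worksheet`, whose `append_rows` accepts a list of rows) that narrows a failure down to a slice of the data instead of one large request:

    def append_rows_chunked(worksheet, rows, chunk_size=500):
        """Append rows in fixed-size chunks, logging progress per chunk.

        Hypothetical helper, not part of this commit: if one chunk fails,
        the log shows roughly which rows carry the oversized payload.
        """
        for start in range(0, len(rows), chunk_size):
            chunk = rows[start:start + chunk_size]
            print(f"appending rows {start}..{start + len(chunk) - 1}")
            worksheet.append_rows(chunk)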