
Commit

Add try-catch for exception during parsing
Adds a row with "ERROR" for all values rather than crashing while writing the CSV file.
gargnitingoogle committed Nov 27, 2024
1 parent 7486d62 commit 8e02828
Showing 2 changed files with 60 additions and 46 deletions.
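
The change wraps row construction in a try/except so that a record with a missing or malformed field produces a sentinel row instead of aborting the whole export. Below is a minimal standalone sketch of that pattern, assuming a made-up _HEADER and record dicts; the real change lives in writeOutput() in the two parse_logs.py files shown in the diffs.

# Minimal sketch of the fallback pattern introduced by this commit.
# _HEADER and the record dicts are illustrative placeholders.
_HEADER = ("mean_file_size", "duration", "pod_name")


def build_rows(records):
  rows = []
  for r in records:
    try:
      # A missing key (or any other parsing problem) raises here instead of
      # aborting the export.
      rows.append((r["mean_file_size"], r["duration"], r["pod_name"]))
    except Exception as e:
      print(f"Error while creating new output row for r={r}: {e}")
      # Emit a sentinel row with "ERROR" in every column so the output still
      # has one line per record.
      rows.append(("ERROR",) * len(_HEADER))
  return rows


print(build_rows([
    {"mean_file_size": 1024, "duration": 12.5, "pod_name": "pod-0"},
    {"duration": 3.0},  # missing keys -> becomes an ERROR row
]))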
53 changes: 30 additions & 23 deletions perfmetrics/scripts/testing_on_gke/examples/dlio/parse_logs.py
@@ -304,29 +304,36 @@ def writeOutput(
)
continue

new_row = (
record_set["mean_file_size"],
record_set["num_files_train"],
total_size,
record_set["batch_size"],
scenario,
r["epoch"],
r["duration"],
r["train_au_percentage"],
r["train_throughput_samples_per_second"],
r["train_throughput_mb_per_second"],
r["throughput_over_local_ssd"],
r["lowest_memory"],
r["highest_memory"],
r["lowest_cpu"],
r["highest_cpu"],
r["pod_name"],
r["start"],
r["end"],
f'"{r["gcsfuse_mount_options"].strip()}"', # need to wrap in quotes to encapsulate commas in the value.
args.instance_id,
)
rows.append(new_row)
try:
new_row = (
record_set["mean_file_size"],
record_set["num_files_train"],
total_size,
record_set["batch_size"],
scenario,
r["epoch"],
r["duration"],
r["train_au_percentage"],
r["train_throughput_samples_per_second"],
r["train_throughput_mb_per_second"],
r["throughput_over_local_ssd"],
r["lowest_memory"],
r["highest_memory"],
r["lowest_cpu"],
r["highest_cpu"],
r["pod_name"],
r["start"],
r["end"],
f'"{r["gcsfuse_mount_options"].strip()}"', # need to wrap in quotes to encapsulate commas in the value.
args.instance_id,
)
rows.append(new_row)
except Exception as e:
print(
f"Error while creating new output row for key={key},"
f" scenario={scenario}, epoch={i}, r={r}: {e}"
)
rows.append((("ERROR",) * len(_HEADER)))

export_to_csv(output_file_path=args.output_file, header=_HEADER, rows=rows)
export_to_gsheet(
53 changes: 30 additions & 23 deletions perfmetrics/scripts/testing_on_gke/examples/fio/parse_logs.py
@@ -338,29 +338,36 @@ def writeOutput(
)
continue

new_row = (
record_set["mean_file_size"],
record_set["read_type"],
scenario,
r["epoch"],
r["duration"],
r["throughput_mb_per_second"],
r["IOPS"],
r["throughput_over_local_ssd"],
r["lowest_memory"],
r["highest_memory"],
r["lowest_cpu"],
r["highest_cpu"],
r["pod_name"],
r["start"],
r["end"],
f'"{r["gcsfuse_mount_options"].strip()}"', # need to wrap in quotes to encapsulate commas in the value.
r["blockSize"],
r["filesPerThread"],
r["numThreads"],
args.instance_id,
)
rows.append(new_row)
try:
new_row = (
record_set["mean_file_size"],
record_set["read_type"],
scenario,
r["epoch"],
r["duration"],
r["throughput_mb_per_second"],
r["IOPS"],
r["throughput_over_local_ssd"],
r["lowest_memory"],
r["highest_memory"],
r["lowest_cpu"],
r["highest_cpu"],
r["pod_name"],
r["start"],
r["end"],
f'"{r["gcsfuse_mount_options"].strip()}"', # need to wrap in quotes to encapsulate commas in the value.
r["blockSize"],
r["filesPerThread"],
r["numThreads"],
args.instance_id,
)
rows.append(new_row)
except Exception as e:
print(
f"Error while creating new output row for key={key},"
f" scenario={scenario}, epoch={i}, r={r}: {e}"
)
rows.append((("ERROR",) * len(_HEADER)))

export_to_csv(output_file_path=args.output_file, header=_HEADER, rows=rows)
export_to_gsheet(
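
Since failed records now surface as rows whose every column is the literal string "ERROR", downstream consumers of the generated CSV can skip them explicitly. A hypothetical filter (the file path and the use of the csv module are illustrative, not part of this commit):

import csv

# Hypothetical downstream filter: drop sentinel rows where every column is "ERROR".
# "output.csv" is a placeholder path, not a file produced by this commit.
with open("output.csv", newline="") as f:
  reader = csv.reader(f)
  header = next(reader)
  good_rows = [row for row in reader if not all(v == "ERROR" for v in row)]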
