
Commit

Merge branch 'google:master' into master
vanhauser-thc authored Oct 17, 2024
2 parents b6fca1f + 2920e74 commit dc35825
Showing 6 changed files with 34 additions and 3 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -44,3 +44,6 @@ docker/generated.mk
 
 # Vim backup files.
 .*.swp
+
+# Diff files from matplotlib
+*-failed-diff.png
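
For background, matplotlib's image-comparison tests write a '*-failed-diff.png' file alongside the compared images whenever a comparison fails beyond tolerance, which is what the new pattern ignores. A minimal sketch of how such a file gets produced (the image file names here are placeholders, not from this repository):

from matplotlib.testing.compare import compare_images

# compare_images() returns None when the images match within tol; on a
# mismatch it writes a '...-failed-diff.png' image next to the compared
# files and returns an error message describing the difference.
result = compare_images('expected.png', 'actual.png', tol=0.01)
print(result)
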
Binary file not shown.
5 changes: 5 additions & 0 deletions common/experiment_utils.py
@@ -129,6 +129,11 @@ def get_corpus_archive_name(cycle: int) -> str:
     return get_cycle_filename('corpus-archive', cycle) + '.tar.gz'
 
 
+def get_coverage_archive_name(cycle: int) -> str:
+    """Returns a coverage archive name given a cycle."""
+    return get_cycle_filename('coverage-archive', cycle) + '.json'
+
+
 def get_stats_filename(cycle: int) -> str:
     """Returns a stats filename given a cycle."""
     return get_cycle_filename('stats', cycle) + '.json'
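
A quick illustration of how the new helper is consumed by the measure_manager.py change below. This is only a sketch: the exact zero-padded form of the name comes from get_cycle_filename(), which is not shown in this diff, so the value in the comment is an assumption.

from common import experiment_utils

# For cycle 3 this yields something like 'coverage-archive-0003.json'
# (assuming get_cycle_filename() zero-pads the cycle number); the measurer
# then appends '.gz' before compressing and uploading the file.
archive_name = experiment_utils.get_coverage_archive_name(3)
local_name = archive_name + '.gz'
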
26 changes: 25 additions & 1 deletion experiment/measurer/measure_manager.py
@@ -16,6 +16,7 @@
 import collections
 import gc
 import glob
+import gzip
 import multiprocessing
 import json
 import os
@@ -614,10 +615,33 @@ def measure_snapshot_coverage( # pylint: disable=too-many-locals
     # Generate profdata and transform it into json form.
     snapshot_measurer.generate_coverage_information(cycle)
 
+    # Compress and save the exported profdata snapshot.
+    coverage_archive_zipped = os.path.join(
+        snapshot_measurer.trial_dir, 'coverage',
+        experiment_utils.get_coverage_archive_name(cycle) + '.gz')
+
+    coverage_archive_dir = os.path.dirname(coverage_archive_zipped)
+    if not os.path.exists(coverage_archive_dir):
+        os.makedirs(coverage_archive_dir)
+
+    with gzip.open(str(coverage_archive_zipped), 'wb') as compressed:
+        with open(snapshot_measurer.cov_summary_file, 'rb') as uncompressed:
+            # Save only the last line (skipping any warnings) so the file can be read directly with pandas.
+            compressed.write(uncompressed.readlines()[-1])
+
+    coverage_archive_dst = exp_path.filestore(coverage_archive_zipped)
+    if filestore_utils.cp(coverage_archive_zipped,
+                          coverage_archive_dst,
+                          expect_zero=False).retcode:
+        snapshot_logger.warning('Coverage not found for cycle: %d.', cycle)
+        return None
+
+    os.remove(coverage_archive_zipped)  # No need to keep the local copy.
+
     # Run crashes again, parse stacktraces and generate crash signatures.
     crashes = snapshot_measurer.process_crashes(cycle)
 
-    # Get the coverage of the new corpus units.
+    # Get the coverage summary of the new corpus units.
     branches_covered = snapshot_measurer.get_current_coverage()
     fuzzer_stats_data = snapshot_measurer.get_fuzzer_stats(cycle)
     snapshot = models.Snapshot(time=this_time,
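
Since the in-code comment above promises that the compressed summary can be read directly with pandas, here is a rough consumer-side sketch. It is not part of this commit, and the local file name is a placeholder for an archive downloaded from the filestore:

import gzip
import json

import pandas as pd

path = 'coverage-archive-0003.json.gz'  # placeholder file name

# pandas infers gzip compression from the '.gz' extension, so the
# single-line JSON summary can be loaded straight into a DataFrame.
df = pd.read_json(path)

# Alternatively, decompress and parse it as plain JSON.
with gzip.open(path, 'rt') as summary_file:
    summary = json.load(summary_file)
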
2 changes: 1 addition & 1 deletion requirements.txt
@@ -21,7 +21,7 @@ redis==4.3.4
 rq==1.11.1
 scikit-posthocs==0.7.0
 scipy==1.9.2
-seaborn==0.12.0
+seaborn==0.13.2
 sqlalchemy==1.4.41
 protobuf==3.20.3
 
1 change: 0 additions & 1 deletion service/gcbrun_experiment.py
@@ -30,7 +30,6 @@
 TRIGGER_COMMAND = '/gcbrun'
 RUN_EXPERIMENT_COMMAND_STR = f'{TRIGGER_COMMAND} run_experiment.py '
 SKIP_COMMAND_STR = f'{TRIGGER_COMMAND} skip'
-# A DUMMY COMMENT
 
 
 def get_comments(pull_request_number):
