mirror of https://github.com/ethereum/solidity (synced 2023-10-03 13:03:40 +00:00)

Skip external test benchmark diff instead of failing when previous run of the job did not succeed

commit 2cea70c04f (parent 3948391ca8)
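In short: download_benchmarks.py now reports its outcome through a Status enum whose values double as process exit statuses (0 = OK, 1 = ERROR, 2 = NO_BENCHMARK, 3 = PENDING), the helper that previously raised a generic DataUnavailable raises the more specific JobMissing or JobNotSuccessful, and the CircleCI config uses exit status 2 as the signal to set SKIP_BENCHMARK_DIFF=true so the optional diff step is skipped instead of failing the job.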
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1257,12 +1257,17 @@ jobs:
               pr_id=$(echo "$CIRCLE_PULL_REQUEST" | sed 's|\(.*\)\/||')
               scripts_dir=../../../scripts
 
-              "${scripts_dir}/externalTests/download_benchmarks.py" --base-of-pr "$pr_id"
+              # Our main goal here is to provide new benchmarks, the diff is optional. When benchmarks from
+              # the previous run are not available for whatever reason, we still succeed and just skip the diff.
+              # download_benchmarks.py exits with status 2 in that case.
+              if "${scripts_dir}/externalTests/download_benchmarks.py" --base-of-pr "$pr_id" || [[ $? == 2 ]]; then
+                  echo 'export SKIP_BENCHMARK_DIFF=true' >> $BASH_ENV
+              fi
             fi
       - run:
          name: Diff benchmarks
          command: |
-            if [[ $CIRCLE_PULL_REQUEST != "" ]]; then
+            if [[ $CIRCLE_PULL_REQUEST != "" && $SKIP_BENCHMARK_DIFF != "true" ]]; then
               cd reports/externalTests/
               mkdir diff/
               scripts_dir=../../scripts
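The comment in the new step states the intended contract: exit status 2 from download_benchmarks.py means "base-branch benchmarks unavailable, skip the diff", while any other failure should still surface. A minimal Python sketch of that contract (the script path is from the diff above; the PR id and env-file name are placeholders):

    import subprocess
    import sys

    def set_skip_flag(env_file: str) -> None:
        # CircleCI carries variables between steps by sourcing $BASH_ENV at each step.
        with open(env_file, 'a', encoding='utf8') as env:
            env.write('export SKIP_BENCHMARK_DIFF=true\n')

    result = subprocess.run(['scripts/externalTests/download_benchmarks.py', '--base-of-pr', '1234'])
    if result.returncode == 2:
        set_skip_flag('bash.env')        # benchmarks unavailable: skip the diff step, stay green
    elif result.returncode != 0:
        sys.exit(result.returncode)      # genuine error: fail the CI step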
--- a/scripts/common/rest_api_helpers.py
+++ b/scripts/common/rest_api_helpers.py
@@ -11,7 +11,28 @@ import requests
 class APIHelperError(Exception):
     pass
 
 
-class DataUnavailable(APIHelperError):
+class JobNotSuccessful(APIHelperError):
+    def __init__(self, name: str, status: str):
+        assert status != 'success'
+
+        self.name = name
+        self.status = status
+        self.job_finished = (status in ['failed', 'blocked'])
+
+        if status == 'not_running':
+            message = f"Job {name} has not started yet."
+        elif status == 'blocked':
+            message = f"Job {name} will not run because one of its dependencies failed."
+        elif status == 'running':
+            message = f"Job {name} is still running."
+        elif status == 'failed':
+            message = f"Job {name} failed."
+        else:
+            message = f"Job {name} did not finish successfully. Current status: {status}."
+
+        super().__init__(message)
+
+
+class JobMissing(APIHelperError):
     pass
 
 
 class InvalidResponse(APIHelperError):
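A short illustration of the state the new exception carries (raised directly here for demonstration; in the real code it comes out of CircleCI.job, and scripts/ is assumed to be on sys.path as download_benchmarks.py arranges):

    from common.rest_api_helpers import JobNotSuccessful

    try:
        raise JobNotSuccessful('c_ext_benchmarks', 'running')
    except JobNotSuccessful as exception:
        print(exception)               # Job c_ext_benchmarks is still running.
        print(exception.job_finished)  # False: 'running' is not in ['failed', 'blocked']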
@@ -145,13 +166,10 @@
     def job(self, workflow_id: str, name: str, require_success: bool = False) -> dict:
         jobs = self.jobs(workflow_id)
         if name not in jobs:
-            raise DataUnavailable(f"Job {name} is not present in the workflow.")
+            raise JobMissing(f"Job {name} is not present in the workflow.")
 
         if require_success and jobs[name]['status'] != 'success':
-            raise DataUnavailable(
-                f"Job {name} has failed or is still running. "
-                f"Current status: {jobs[name]['status']}."
-            )
+            raise JobNotSuccessful(name, jobs[name]['status'])
 
         return jobs[name]
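With require_success=True a caller can now tell the outcomes apart instead of catching one generic error; a hypothetical call (the workflow id is a placeholder, the constructor arguments follow the usage elsewhere in the diff):

    from common.rest_api_helpers import CircleCI, JobMissing, JobNotSuccessful

    circleci = CircleCI('ethereum/solidity', False)
    try:
        job_info = circleci.job('0000-workflow-id', 'c_ext_benchmarks', require_success=True)
    except JobMissing as exception:
        print(f"Job absent from the workflow: {exception}")
    except JobNotSuccessful as exception:
        # exception.job_finished separates "try again later" from "benchmarks will never appear".
        print(f"Job not successful (finished: {exception.job_finished})")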
--- a/scripts/externalTests/download_benchmarks.py
+++ b/scripts/externalTests/download_benchmarks.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 from argparse import ArgumentParser, Namespace
+from enum import Enum, unique
 from pathlib import Path
 from typing import Mapping, Optional
 import sys
@@ -13,10 +14,18 @@ SCRIPTS_DIR = Path(__file__).parent.parent
 sys.path.insert(0, str(SCRIPTS_DIR))
 
 from common.git_helpers import git_current_branch, git_commit_hash
-from common.rest_api_helpers import APIHelperError, CircleCI, Github, download_file
+from common.rest_api_helpers import APIHelperError, JobNotSuccessful, CircleCI, Github, download_file
 # pragma pylint: enable=import-error,wrong-import-position
 
 
+@unique
+class Status(Enum):
+    OK = 0            # Benchmarks downloaded successfully
+    ERROR = 1         # Error in the script, bad API response, unexpected data, etc.
+    NO_BENCHMARK = 2  # Benchmark collector job did not finish successfully and/or benchmark artifacts are missing.
+    PENDING = 3       # Benchmark collector job has not finished yet.
+
+
 def process_commandline() -> Namespace:
     script_description = (
         "Downloads benchmark results attached as artifacts to the c_ext_benchmarks job on CircleCI. "
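The Status values double as process exit codes via .value; a self-contained demonstration of the mapping (the enum is re-declared so the snippet runs on its own):

    import sys
    from enum import Enum, unique

    @unique
    class Status(Enum):
        OK = 0
        ERROR = 1
        NO_BENCHMARK = 2
        PENDING = 3

    # What main() effectively does: translate an outcome into a shell-visible status.
    sys.exit(Status.NO_BENCHMARK.value)  # the calling shell sees $? == 2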
@@ -76,14 +85,16 @@ def download_benchmark_artifact(
     commit_hash: str,
     overwrite: bool,
     silent: bool = False
-):
+) -> bool:
     if not silent:
         print(f"Downloading artifact: {benchmark_name}-{branch}-{commit_hash[:8]}.json.")
 
     artifact_path = f'reports/externalTests/{benchmark_name}.json'
 
     if artifact_path not in artifacts:
-        raise RuntimeError(f"Missing artifact: {artifact_path}.")
+        if not silent:
+            print(f"Missing artifact: {artifact_path}.")
+        return False
 
     download_file(
         artifacts[artifact_path]['url'],
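The raise-to-return-False change makes a missing artifact a reportable outcome rather than a fatal error. A toy version of the new lookup behavior (the artifact table is made up):

    def try_artifact(artifacts: dict, artifact_path: str, silent: bool = False) -> bool:
        # Mirrors the new control flow: report and return False instead of raising RuntimeError.
        if artifact_path not in artifacts:
            if not silent:
                print(f"Missing artifact: {artifact_path}.")
            return False
        return True  # in the real function, download_file() runs before this

    artifacts = {'reports/externalTests/summarized-benchmarks.json': {'url': 'https://example.com/s.json'}}
    assert try_artifact(artifacts, 'reports/externalTests/summarized-benchmarks.json')
    assert not try_artifact(artifacts, 'reports/externalTests/all-benchmarks.json')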
@@ -91,6 +102,8 @@
         overwrite,
     )
 
+    return True
+
 
 def download_benchmarks(
     branch: Optional[str],
@@ -100,7 +113,7 @@ def download_benchmarks(
     overwrite: bool = False,
     debug_requests: bool = False,
     silent: bool = False,
-):
+) -> Status:
     github = Github('ethereum/solidity', debug_requests)
     circleci = CircleCI('ethereum/solidity', debug_requests)
 
@@ -141,32 +154,41 @@ def download_benchmarks(
 
     artifacts = circleci.artifacts(int(benchmark_collector_job['job_number']))
 
-    download_benchmark_artifact(artifacts, 'summarized-benchmarks', branch, actual_commit_hash, overwrite, silent)
-    download_benchmark_artifact(artifacts, 'all-benchmarks', branch, actual_commit_hash, overwrite, silent)
+    got_summary = download_benchmark_artifact(artifacts, 'summarized-benchmarks', branch, actual_commit_hash, overwrite, silent)
+    got_full = download_benchmark_artifact(artifacts, 'all-benchmarks', branch, actual_commit_hash, overwrite, silent)
+
+    return Status.OK if got_summary and got_full else Status.NO_BENCHMARK
 
 
 def main():
     try:
         options = process_commandline()
-        download_benchmarks(
+        return download_benchmarks(
             options.branch,
             options.pull_request_id,
             options.base_of_pr,
             options.ignore_commit_hash,
             options.overwrite,
             options.debug_requests,
-        )
-
-        return 0
+        ).value
+    except JobNotSuccessful as exception:
+        print(f"[ERROR] {exception}", file=sys.stderr)
+        if not exception.job_finished:
+            print("Please wait for the workflow to finish and try again.", file=sys.stderr)
+            return Status.PENDING.value
+        else:
+            print("Benchmarks from this run of the pipeline are not available.", file=sys.stderr)
+            return Status.NO_BENCHMARK.value
     except APIHelperError as exception:
         print(f"[ERROR] {exception}", file=sys.stderr)
-        return 1
+        return Status.ERROR.value
     except requests.exceptions.HTTPError as exception:
         print(f"[ERROR] {exception}", file=sys.stderr)
-        return 1
+        return Status.ERROR.value
     except RuntimeError as exception:
         print(f"[ERROR] {exception}", file=sys.stderr)
-        return 1
+        return Status.ERROR.value
 
 
 if __name__ == '__main__':
     sys.exit(main())
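Taken together, main() now exposes the full Status contract to callers such as the CI step above; a sketch of a consumer branching on the exit status (invocation details are illustrative):

    import subprocess

    result = subprocess.run(['scripts/externalTests/download_benchmarks.py', '--base-of-pr', '1234'])
    meaning = {0: 'OK', 1: 'ERROR', 2: 'NO_BENCHMARK (skip the diff)', 3: 'PENDING (retry later)'}
    print(meaning.get(result.returncode, f"unexpected status {result.returncode}"))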