Update Shippable downloader to support new API.

- Download run/jobs by URL (old or new).
- Download run metadata.
- Download job metadata.
- Format downloaded JSON.
- Handle and save coverage/test results download errors.
pull/22960/head
Matt Clay 8 years ago
parent 318445fe19
commit dfe8eca3d2
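
The diff below accepts the run in three forms (old run URL, new run URL, or account/project/run_number) and normalizes it before talking to the API. As a rough standalone sketch of that normalization, not part of the commit: the regular expressions and the old-URL prefix are taken from the diff, while the helper name parse_run_argument and the sample value are illustrative only.

    import re

    OLD_RUNS_PREFIX = 'https://app.shippable.com/runs/'

    NEW_RUN_URL = re.compile(r'^https://app.shippable.com/github/(?P<account>[^/]+)/(?P<project>[^/]+)/runs/(?P<run_number>[0-9]+)$')
    RUN_NAME = re.compile(r'^(?P<account>[^/]+)/(?P<project>[^/]+)/(?P<run_number>[0-9]+)$')


    def parse_run_argument(run_id):
        """Classify a RUN argument the way the updated script does (illustrative only)."""
        # An old-style run URL reduces to the bare run id it ends with.
        if run_id.startswith(OLD_RUNS_PREFIX):
            return dict(run_id=run_id[len(OLD_RUNS_PREFIX):])

        # A new-style run URL or an account/project/run_number name carries the
        # account, project and run number; the script resolves these to a run id
        # through the Shippable API before downloading anything.
        match = NEW_RUN_URL.search(run_id) or RUN_NAME.search(run_id)

        if match:
            return dict(
                account=match.group('account'),
                project=match.group('project'),
                run_number=int(match.group('run_number')),
            )

        # Anything else is assumed to already be a run id.
        return dict(run_id=run_id)


    # Hypothetical example value, for illustration only.
    print(parse_run_argument('someaccount/someproject/123'))
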

@@ -39,7 +39,8 @@ def main():
     parser = ArgumentParser(description='Download results from a Shippable run.')
 
     parser.add_argument('run_id',
-                        help='shippable run id.')
+                        metavar='RUN',
+                        help='shippable run id, run url or run name formatted as: account/project/run_number')
 
     parser.add_argument('-v', '--verbose',
                         dest='verbose',
@@ -69,11 +70,20 @@ def main():
                         action='store_true',
                         help='download code coverage results')
 
+    parser.add_argument('--job-metadata',
+                        action='store_true',
+                        help='download job metadata')
+
+    parser.add_argument('--run-metadata',
+                        action='store_true',
+                        help='download run metadata')
+
     parser.add_argument('--all',
                         action='store_true',
                         help='download everything')
 
     parser.add_argument('--job-number',
+                        metavar='N',
                         action='append',
                         type=int,
                         help='limit downloads to the given job number')
@@ -83,57 +93,145 @@ def main():
     args = parser.parse_args()
 
+    old_runs_prefix = 'https://app.shippable.com/runs/'
+
+    if args.run_id.startswith(old_runs_prefix):
+        args.run_id = args.run_id[len(old_runs_prefix):]
+
     if args.all:
         args.console_logs = True
         args.test_results = True
         args.coverage_results = True
+        args.job_metadata = True
+        args.run_metadata = True
+
+    selections = (
+        args.console_logs,
+        args.test_results,
+        args.coverage_results,
+        args.job_metadata,
+        args.run_metadata,
+    )
 
-    if not args.console_logs and not args.test_results and not args.coverage_results:
-        parser.error('At least one download option is required: --console-logs, --test-results, --coverage-results')
+    if not any(selections):
+        parser.error('At least one download option is required.')
 
     headers = dict(
         Authorization='apiToken %s' % args.api_key,
     )
 
+    match = re.search(r'^https://app.shippable.com/github/(?P<account>[^/]+)/(?P<project>[^/]+)/runs/(?P<run_number>[0-9]+)$', args.run_id)
+
+    if not match:
+        match = re.search(r'^(?P<account>[^/]+)/(?P<project>[^/]+)/(?P<run_number>[0-9]+)$', args.run_id)
+
+    if match:
+        account = match.group('account')
+        project = match.group('project')
+        run_number = int(match.group('run_number'))
+
+        url = 'https://api.shippable.com/projects'
+        response = requests.get(url, dict(projectFullNames='%s/%s' % (account, project)), headers=headers)
+
+        if response.status_code != 200:
+            raise Exception(response.content)
+
+        project_id = response.json()[0]['id']
+
+        url = 'https://api.shippable.com/runs?projectIds=%s' % project_id
+        response = requests.get(url, headers=headers)
+
+        if response.status_code != 200:
+            raise Exception(response.content)
+
+        run = [run for run in response.json() if run['runNumber'] == run_number][0]
+
+        args.run_id = run['id']
+    else:
+        url = 'https://api.shippable.com/runs/%s' % args.run_id
+        response = requests.get(url, headers=headers)
+
+        if response.status_code != 200:
+            raise Exception(response.content)
+
+        run = response.json()
+
+        account = run['subscriptionOrgName']
+        project = run['projectName']
+        run_number = run['runNumber']
+
+    output_dir = '%s/%s/%s' % (account, project, run_number)
+
     response = requests.get('https://api.shippable.com/jobs?runIds=%s' % args.run_id, headers=headers)
 
     if response.status_code != 200:
         raise Exception(response.content)
 
-    body = response.json()
-    output_dir = args.run_id
+    jobs = sorted(response.json(), key=lambda job: int(job['jobNumber']))
 
     if not args.test:
         if not os.path.exists(output_dir):
-            os.mkdir(output_dir)
+            os.makedirs(output_dir)
 
-    for j in body:
+    if args.run_metadata:
+        path = os.path.join(output_dir, 'run.json')
+        contents = json.dumps(run, sort_keys=True, indent=4)
+
+        if args.verbose or args.test:
+            print(path)
+
+        if not args.test:
+            with open(path, 'w') as metadata_fd:
+                metadata_fd.write(contents)
+
+    for j in jobs:
         job_id = j['id']
         job_number = j['jobNumber']
 
         if args.job_number and job_number not in args.job_number:
             continue
 
+        if args.job_metadata:
+            path = os.path.join(output_dir, '%s/job.json' % job_number)
+            contents = json.dumps(j, sort_keys=True, indent=4)
+
+            if args.verbose or args.test:
+                print(path)
+
+            if not args.test:
+                directory = os.path.dirname(path)
+
+                if not os.path.exists(directory):
+                    os.makedirs(directory)
+
+                with open(path, 'w') as metadata_fd:
+                    metadata_fd.write(contents)
+
         if args.console_logs:
-            path = os.path.join(output_dir, '%s-console.log' % job_number)
+            path = os.path.join(output_dir, '%s/console.log' % job_number)
             url = 'https://api.shippable.com/jobs/%s/consoles?download=true' % job_id
-            download(args, headers, path, url)
+            download(args, headers, path, url, is_json=False)
 
         if args.test_results:
-            path = os.path.join(output_dir, '%s-test.json' % job_number)
+            path = os.path.join(output_dir, '%s/test.json' % job_number)
             url = 'https://api.shippable.com/jobs/%s/jobTestReports' % job_id
             download(args, headers, path, url)
-            extract_contents(args, path, os.path.join(output_dir, '%s-test' % job_number))
+            extract_contents(args, path, os.path.join(output_dir, '%s/test' % job_number))
 
         if args.coverage_results:
-            path = os.path.join(output_dir, '%s-coverage.json' % job_number)
+            path = os.path.join(output_dir, '%s/coverage.json' % job_number)
             url = 'https://api.shippable.com/jobs/%s/jobCoverageReports' % job_id
             download(args, headers, path, url)
-            extract_contents(args, path, os.path.join(output_dir, '%s-coverage' % job_number))
+            extract_contents(args, path, os.path.join(output_dir, '%s/coverage' % job_number))
 
 
 def extract_contents(args, path, output_dir):
     if not args.test:
+        if not os.path.exists(path):
+            return
+
         with open(path, 'r') as json_fd:
             items = json.load(json_fd)
@@ -149,12 +247,15 @@ def extract_contents(args, path, output_dir):
                 if args.verbose:
                     print(path)
 
+                if path.endswith('.json'):
+                    contents = json.dumps(json.loads(contents), sort_keys=True, indent=4)
+
                 if not os.path.exists(path):
                     with open(path, 'w') as output_fd:
                         output_fd.write(contents)
 
 
-def download(args, headers, path, url):
+def download(args, headers, path, url, is_json=True):
     if args.verbose or args.test:
         print(path)
@@ -165,9 +266,17 @@ def download(args, headers, path, url):
         response = requests.get(url, headers=headers)
 
         if response.status_code != 200:
-            raise Exception(response.content)
+            path += '.error'
+
+        if is_json:
+            content = json.dumps(response.json(), sort_keys=True, indent=4)
+        else:
+            content = response.content
+
+        directory = os.path.dirname(path)
 
-        content = response.content
+        if not os.path.exists(directory):
+            os.makedirs(directory)
 
         with open(path, 'w') as f:
             f.write(content)
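
Taken together, the path changes above move results from a flat run-id directory with job-prefixed file names to an account/project/run_number tree with one subdirectory per job, and a failed artifact download is saved with an .error suffix instead of raising. A small sketch of the resulting layout, not part of the commit; the account, project and job values are placeholders.

    import os

    # Hypothetical values for illustration only.
    account, project, run_number, job_number = 'someaccount', 'someproject', 123, 1

    output_dir = '%s/%s/%s' % (account, project, run_number)

    expected_paths = [
        os.path.join(output_dir, 'run.json'),                       # --run-metadata
        os.path.join(output_dir, '%s/job.json' % job_number),       # --job-metadata
        os.path.join(output_dir, '%s/console.log' % job_number),    # --console-logs
        os.path.join(output_dir, '%s/test.json' % job_number),      # --test-results
        os.path.join(output_dir, '%s/coverage.json' % job_number),  # --coverage-results
    ]

    # A response with a non-200 status is now written to '<path>.error' instead
    # of raising, so one failed artifact no longer aborts the whole download.
    print('\n'.join(expected_paths))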
