mozregression but on MinGW builds using TC artifacts from mozilla-central
#!/usr/bin/env python3
import re
import json
import logging
import dateutil.parser as parser
import datetime
import requests
import argparse

# COUNT and argparse are not used yet; the TODO at the bottom of the file
# describes the CLI flags and extra push fetching they are presumably meant for.
COUNT = 500
PUSH_LIST_URL = "https://treeherder.mozilla.org/api/project/mozilla-central/push/?count=1000"
JOB_LIST_URL = "https://treeherder.mozilla.org/api/jobs/?push_id="
BUILD_URL = "https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/%s/runs/0/artifacts/public%%2Fbuild%%2Ftarget.zip"
HEADERS = {'User-Agent': 'mingw-mozregression'}


class Struct:
    """Turns a dict into an object with attribute access."""
    def __init__(self, **entries):
        self.__dict__.update(entries)


class JobSet:
    """Holds the four MinGW build jobs (x86/x64, opt/debug) for a single push."""
    def __init__(self):
        self.x86_debug = None
        self.x86_opt = None
        self.x64_debug = None
        self.x64_opt = None

    def complete(self):
        return self.x86_debug and self.x86_opt and self.x64_debug and self.x64_opt

    def update(self, jobs):
        for j in jobs:
            if j.job_type_name == 'build-win64-mingwclang/opt':
                assert self.x64_opt is None
                self.x64_opt = j
            elif j.job_type_name == 'build-win32-mingwclang/opt':
                assert self.x86_opt is None
                self.x86_opt = j
            elif j.job_type_name == 'build-win64-mingwclang/debug':
                assert self.x64_debug is None
                self.x64_debug = j
            elif j.job_type_name == 'build-win32-mingwclang/debug':
                assert self.x86_debug is None
                self.x86_debug = j

    def __repr__(self):
        s = ""
        for label, job in (("x64 Opt", self.x64_opt), ("x64 Debug", self.x64_debug),
                           ("x86 Opt", self.x86_opt), ("x86 Debug", self.x86_debug)):
            s += label + ": "
            if job:
                s += "(%s %s %s) " % (job.id, job.task_id, job.result)
            else:
                s += "None "
        return s


def _realtime(timestamp):
    """Render either a Unix timestamp or an ISO-style date string as YYYY-MM-DD_HH:MM:SS."""
    format_str = '%Y-%m-%d_%H:%M:%S'
    if re.match("^[0-9]+$", str(timestamp)):
        return datetime.datetime.utcfromtimestamp(timestamp).strftime(format_str)
    else:
        return parser.parse(timestamp).strftime(format_str)


def _transform_job_list(property_names, job_list):
    """Treeherder returns each job as a bare list of values plus a separate list of
    property names; zip them together into attribute-accessible objects."""
    return [Struct(**dict(zip(property_names, j))) for j in job_list]


def get_mingw_jobs(push_id):
    """Walk the (paginated) job list for a push until all four MinGW builds are found."""
    mingw_jobs = JobSet()
    url = JOB_LIST_URL + str(push_id)
    while url and not mingw_jobs.complete():
        logging.debug("Fetching %s", url)
        r = requests.get(url, headers=HEADERS)
        result_json = r.json()
        jobs = _transform_job_list(result_json['job_property_names'], result_json['results'])
        url = result_json['next']
        mingw_jobs.update(jobs)
    return mingw_jobs


def download_mingw_build(job):
    """Stream the target.zip artifact of a job's TaskCluster task to the current directory."""
    url = BUILD_URL % job.task_id
    filename = job.job_type_name.replace("/", "_") + "_" + _realtime(job.last_modified) + ".zip"
    logging.info("Downloading %s to %s", url, filename)
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        with open(filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s')

    logging.info("Loading push list...")
    logging.debug("Fetching %s", PUSH_LIST_URL)
    r = requests.get(PUSH_LIST_URL, headers=HEADERS)
    pushes = r.json()
    logging.info("First push: %s %i", _realtime(pushes['results'][0]['push_timestamp']), pushes['results'][0]['id'])
    logging.info("Last push: %s %i", _realtime(pushes['results'][-1]['push_timestamp']), pushes['results'][-1]['id'])

    # For now, just grab one push and its x64 opt build; the binary search
    # described in the TODO below is not implemented yet.
    mingw_jobs = get_mingw_jobs(pushes['results'][1]['id'])
    logging.info(mingw_jobs)
    download_mingw_build(mingw_jobs.x64_opt)

""" | |
TODO | |
- Accept start and end dates as CLI flags | |
- Match up start and end dates to the pushes I retrieved | |
- Maybe go get more pushes using the &push_timestamp__lte=<> argument to go back in time | |
- Do a binary search on pushes between the dates | |
- Test the start and end dates to be certain | |
- Accept from the user if the push is good or broken | |
Future: | |
- Unzip and run the build for the user | |
- Download the next (two) builds in the binary search in the background in anticipation | |
- Remove the one I didn't need | |
- Offer to delete old runs | |
""" |