Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 21 additions & 20 deletions apis/bugzilla_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,22 @@ def is_prod(url):
return "allizom" not in url


def _load_json_or_raise(r, where: str):
# Work off bytes, decode once, tolerate bad unicode, and truncate preview.
raw = getattr(r, "content", b"") or b""
body = raw.decode("utf-8", errors="replace")
try:
return json.loads(body)
except Exception as e:
status = getattr(r, "status_code", "unknown")
ctype = getattr(r, "headers", {}).get("Content-Type", "unknown")
preview = body[:2000]
raise Exception(
f"{where}: Could not decode a Bugzilla response as JSON "
f"(status={status}, content_type={ctype}, len={len(body)}): {preview}"
) from e


def fileBug(url, apikey, ff_version, product, component, summary, description, cc_list, needinfo, see_also, depends_on, blocks, moco_confidential):
assert isinstance(cc_list, list)

Expand Down Expand Up @@ -74,10 +90,7 @@ def fileBug(url, apikey, ff_version, product, component, summary, description, c

r = requests.post(url + "bug?api_key=" + apikey, json=data)

try:
j = json.loads(r.text)
except Exception as e:
raise Exception("Could not decode a bugzilla response as JSON: " + r.text) from e
j = _load_json_or_raise(r, "fileBug")

if 'id' in j:
return j['id']
Expand Down Expand Up @@ -106,10 +119,7 @@ def commentOnBug(url, apikey, bugID, comment, needinfo=None, assignee=None):
json=data
)

try:
j = json.loads(r.text)
except Exception as e:
raise Exception("Could not decode a bugzilla response as JSON: " + r.text) from e
j = _load_json_or_raise(r, "commentOnBug")

if 'bugs' in j:
if len(j['bugs']) > 0:
Expand Down Expand Up @@ -137,10 +147,7 @@ def closeBug(url, apikey, bugID, resolution, comment, dup_id=None):
json=data
)

try:
j = json.loads(r.text)
except Exception as e:
raise Exception("Could not decode a bugzilla response as JSON: " + r.text) from e
j = _load_json_or_raise(r, "closeBug")

if 'bugs' in j:
if len(j['bugs']) > 0:
Expand All @@ -154,10 +161,7 @@ def closeBug(url, apikey, bugID, resolution, comment, dup_id=None):
def openBugsMetadata(url, bugIDs):
r = requests.get(url + "bug?resolution=---&id=%s&include_fields=id,assigned_to" % ",".join([str(b) for b in bugIDs]))

try:
j = json.loads(r.text)
except Exception as e:
raise Exception("Could not decode a bugzilla response as JSON: " + r.text) from e
j = _load_json_or_raise(r, "openBugsMetadata")

try:
return {b['id']: b for b in j['bugs']}
Expand All @@ -179,10 +183,7 @@ def markFFVersionAffected(url, apikey, bugID, ff_version, affected):
json=data
)

try:
j = json.loads(r.text)
except Exception as e:
raise Exception("Could not decode a bugzilla response as JSON: " + r.text) from e
j = _load_json_or_raise(r, "markFFVersionAffected")

if 'bugs' in j:
if len(j['bugs']) > 0:
Expand Down
4 changes: 3 additions & 1 deletion apis/phabricator.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,10 @@ def submit_patches(self, bug_id, has_patches):
phab_revisions = []

@retry
def submit_to_phabricator(rev_id):
def submit_to_phabricator(rev_id, retry_attempt=None):
cmd = [_arc(), "diff", "--verbatim", "--conduit-uri", self.url]
if retry_attempt > 1:
cmd.append("--trace")
if rev_id:
cmd.append(rev_id)
cmd.append("--")
Expand Down
4 changes: 2 additions & 2 deletions apis/taskcluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,11 +44,11 @@ def from_string(s, logger):
return Classification.NewFailure
elif s == "new failure not classified": # from TC
return Classification.NewFailure
elif s == "fixedByCommit": # from Push Health
elif s in ["fixedByCommit", "intermittent needs bugid"]: # from Push Health
return Classification.NotYourFault
elif s in ["autoclassified intermittent", "expected fail", "fixed by commit", "infra"]: # from TC
return Classification.NotYourFault
elif s == "intermittent": # from Push Health
elif s in ["intermittent"]: # from Push Health
return Classification.PossibleIntermittent
elif s in ["intermittent", "not classified"]: # from TC
return Classification.PossibleIntermittent
Expand Down
42 changes: 20 additions & 22 deletions components/utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import copy
import inspect
import pickle
import functools
import time
Expand Down Expand Up @@ -144,28 +145,25 @@ def decorate(func):
# Retry calling a function `times` times, sleeping between each tries, with an exponential backoff
# This is to be used on API calls, that are likely to fail

RETRY_TIMES = 10
def retry(_func=None, *, times=10, sleep_s=1, exp=2, exceptions=(Exception,), attempt_kw="retry_attempt"):
def decorator(func):
sig = inspect.signature(func)
accepts_attempt = attempt_kw in sig.parameters


def retry(_func=None, *, sleep_s=1, exp=2):
def decorator_retry(func):
@functools.wraps(func)
def wrapper_retry(*args, **kwargs):
global RETRY_TIMES
retries_try = RETRY_TIMES
sleep_duration = sleep_s
while retries_try > 0:
def wrapper(*args, **kwargs):
backoff = sleep_s
for attempt in range(1, times + 1):
try:
return func(*args, **kwargs)
except BaseException as e:
retries_try -= 1
time.sleep(sleep_duration)
sleep_duration *= exp
if retries_try == 0:
raise e
return wrapper_retry

if _func is None:
return decorator_retry
else:
return decorator_retry(_func)
call_kwargs = dict(kwargs)
if accepts_attempt:
call_kwargs[attempt_kw] = attempt
return func(*args, **call_kwargs)
except exceptions:
if attempt == times:
raise # preserves the original traceback
time.sleep(backoff)
backoff *= exp
return wrapper

return decorator if _func is None else decorator(_func)
35 changes: 33 additions & 2 deletions tests/functionality_all_platforms.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
from apis.taskcluster import TaskclusterProvider
from apis.phabricator import PhabricatorProvider

from tests.functionality_utilities import SHARED_COMMAND_MAPPINGS, TRY_OUTPUT, TRY_LOCKED_OUTPUT, CONDUIT_EDIT_OUTPUT, MockedBugzillaProvider, treeherder_response
from tests.functionality_utilities import SHARED_COMMAND_MAPPINGS, TRY_OUTPUT, TRY_LOCKED_OUTPUT, ARC_OUTPUT, CONDUIT_EDIT_OUTPUT, MockedBugzillaProvider, treeherder_response
from tests.mock_commandprovider import TestCommandProvider
from tests.mock_libraryprovider import MockLibraryProvider
from tests.mock_treeherder_server import MockTreeherderServerFactory, TYPE_HEALTH
Expand Down Expand Up @@ -662,7 +662,7 @@ def treeherder(request_type, fullpath):

def try_submit(cmd):
nonlocal try_fails
if try_fails == 0:
if try_fails < 2:
try_fails += 1
raise Exception("No worky!")
if "./mach try auto" in cmd:
Expand Down Expand Up @@ -697,6 +697,37 @@ def try_submit(cmd):
finally:
self._cleanup(u, expected_values)

# Phabricator submission fails on the first attempt, then succeeds on the
# retry. Also verifies that the retried `arc` invocation carries --trace,
# i.e. that the retry decorator's attempt number reaches the provider.
@logEntryExitHeaderLine
def testPhabRetryFunctionality(self):
    try_fails = 0  # how many forced failures phab_submit has produced so far

    def phab_submit(cmd):
        # Mock for the phabricator submit command: raise exactly once,
        # then require --trace on the command line before succeeding.
        nonlocal try_fails
        if try_fails == 0:
            try_fails += 1
            raise Exception("No worky!")
        if "--trace" not in cmd:
            raise Exception("Expected to see --trace in the phabricator command")
        return ARC_OUTPUT % (83000 + 50, 83000 + 50)

    library_filter = 'dav1d'
    (u, expected_values, _check_jobs) = self._setup(
        library_filter,
        lambda b: ["try_rev|2021-02-09 15:30:04 -0500|2021-02-12 17:40:01 +0000"],
        lambda: 50,  # get_filed_bug_id_func,
        lambda b: {},  # filed_bug_ids_func
        AssertFalse,  # treeherder_response
        command_callbacks={'phab_submit': phab_submit}
    )
    try:
        # Run it
        u.run(library_filter=library_filter)
        # Check that we created the job successfully
        _check_jobs(JOBSTATUS.AWAITING_SECOND_PLATFORMS_TRY_RESULTS, JOBOUTCOME.PENDING)
    finally:
        # Always tear down the updatebot instance, even if assertions fail.
        self._cleanup(u, expected_values)

@logEntryExitHeaderLine
def testAllNewFuzzyPathJobs(self):
@treeherder_response
Expand Down
2 changes: 1 addition & 1 deletion tests/mock_treeherder_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
TYPE_HEALTH = "health"
TYPE_JOBS = "jobs"

FAILURE_CLASSIFICATIONS = """[{"id":7,"name":"autoclassified intermittent"},{"id":3,"name":"expected fail"},{"id":2,"name":"fixed by commit"},{"id":5,"name":"infra"},{"id":4,"name":"intermittent"},{"id":1,"name":"not classified"}]"""
FAILURE_CLASSIFICATIONS = """[{"id":7,"name":"autoclassified intermittent"},{"id":8,"name":"intermittent needs bugid"},{"id":3,"name":"expected fail"},{"id":2,"name":"fixed by commit"},{"id":5,"name":"infra"},{"id":4,"name":"intermittent"},{"id":1,"name":"not classified"}]"""

EXPECTEDPATH_PUSH = "push/?revision="
EXPECTEDPATH_JOBS = "jobs/?push_id="
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,7 @@
"config": "opt",
"key": "tLeakSanitizerleakatjs_pod_arena_mallocmaybe_pod_arena_mallocjsdetailOrderedHashTablepod_mallocoptlinux180464asantestlinux180464asanoptmochitestbrowserchromee10s1Mochitests",
"jobKey": "optlinux1804-64-asantest-linux1804-64-asan/opt-mochitest-browser-chrome-e10s-1Mochitests",
"suggestedClassification": "fixedByCommit",
"suggestedClassification": "intermittent needs bugid",
"confidence": 100,
"tier": 1,
"failedInParent": false,
Expand Down