from datetime import datetime, date, timedelta
import json
import os
import sys

import requests

CRASH_STATS_BASE_URL = "https://crash-stats.mozilla.com/api/"


def getVersions(channel):
    # Query the list of "Current Versions"; these are the only versions crash-stats provides ADI data for
    r = requests.get(CRASH_STATS_BASE_URL + "ProductVersions",
                     params={"active": "true", "product": "Firefox", "build_type": channel},
                     timeout=20)
    versions = [data["version"] for data in r.json().get("hits", [])]
    # Exclude the special "X.0b" versions which provide no ADI
    if channel == "beta":
        versions = [v for v in versions if not v.endswith("b")]
    return versions


def getPartialJSON(channel, timestamp):
    # Query the ADI data for a given channel and day
    versions = getVersions(channel)
    r = requests.get(CRASH_STATS_BASE_URL + "ADI",
                     params={"start_date": timestamp, "end_date": timestamp, "product": "Firefox",
                             "platforms": ["Windows", "Mac OS X", "Linux"], "versions": versions},
                     timeout=20)
    ADI = []
    for data in r.json().get("hits", []):
        # Translate crash-stats' handling of the RC case: X.0b99 --> X.0
        # NB: this merges all RC builds together, but we can't do anything about that
        # TODO - check what happens in the UI
        ADI.append({"version": data["version"].replace("b99", ""), "ADI": data["adi_count"]})
    # ship-it expects the data to be in descending-ADI order
    ADI.sort(key=lambda x: x["ADI"], reverse=True)
    return ADI
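# Illustrative shape of the list returned above (the version string and count
# are made-up placeholders, not real data): [{"version": "49.0", "ADI": 1234567}, ...]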


def getLatestDataset():
    # ADI data lags behind, so work backwards from today to find the newest day with data
    timestamp = date.today()
    limit = timestamp - timedelta(days=7)
    while getPartialJSON("release", timestamp) == []:
        timestamp = timestamp - timedelta(days=1)
        if timestamp <= limit:
            sys.exit("Stale ADI data, giving up")
    return timestamp


def saveAllPartial(exportFile):
    timestamp = getLatestDataset()
    partialESR = getPartialJSON("esr", timestamp)
    partialRelease = getPartialJSON("release", timestamp)
    partialBeta = getPartialJSON("beta", timestamp)
    lastUpdate = "%s using %s ADI data" % (datetime.utcnow().strftime("%d/%m/%Y %H:%M"),
                                           timestamp.strftime("%d/%m/%Y"))
    full = {"beta": partialBeta, "release": partialRelease, "esr": partialESR, "lastUpdate": lastUpdate}

    # Write to a temporary file first, then rename it over the target so
    # consumers never see a partially-written export.
    with open(exportFile + ".tmp", "w") as outfile:
        json.dump(full, outfile)
    os.rename(exportFile + ".tmp", exportFile)

if __name__ == "__main__":
    saveAllPartial("partial.json")
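
# A minimal sketch of how the exported file could be consumed; the structure
# mirrors the "full" dict built in saveAllPartial (any literal values shown are
# illustrative placeholders, not real ADI figures):
#
#     import json
#     with open("partial.json") as f:
#         data = json.load(f)
#     # data["release"], data["beta"] and data["esr"] are lists like
#     # [{"version": "49.0", "ADI": 1234567}, ...], sorted by descending ADI.
#     # data["lastUpdate"] records when the export ran and which day's ADI data
#     # it used, formatted with the strftime patterns above.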