summaryrefslogtreecommitdiff
path: root/bin/update
diff options
context:
space:
mode:
authorMarkus Mohrhard <markus.mohrhard@googlemail.com>2016-08-24 23:30:17 +0200
committerMarkus Mohrhard <markus.mohrhard@googlemail.com>2017-05-19 03:43:19 +0200
commitb221bbc5f85e8d8ade648badbb73dc4892ed6dfc (patch)
tree7296867e39cf150be496d0425dcac5f4831e6530 /bin/update
parentde4678e4ea0e3983bee9238722a65fd6fc32f847 (diff)
add files to send update info to balrog server
Change-Id: Iffd2c87987c7bb0b3f892bf8b3038822daf67439
Diffstat (limited to 'bin/update')
-rw-r--r--bin/update/balrog-release-pusher.py115
-rw-r--r--bin/update/balrog-release-shipper.py87
-rw-r--r--bin/update/balrog-submitter.py98
-rw-r--r--bin/update/balrog/__init__.py0
-rw-r--r--bin/update/balrog/submitter/__init__.py0
-rw-r--r--bin/update/balrog/submitter/__init__.pycbin0 -> 148 bytes
-rw-r--r--bin/update/balrog/submitter/api.py229
-rw-r--r--bin/update/balrog/submitter/api.pycbin0 -> 8696 bytes
-rw-r--r--bin/update/balrog/submitter/cli.py502
-rw-r--r--bin/update/balrog/submitter/cli.pycbin0 -> 16719 bytes
-rw-r--r--bin/update/balrog/submitter/updates.py26
-rw-r--r--bin/update/balrog/submitter/updates.pycbin0 -> 881 bytes
-rw-r--r--bin/update/release/__init__.py0
-rw-r--r--bin/update/release/__init__.pycbin0 -> 139 bytes
-rw-r--r--bin/update/release/info.py2
-rw-r--r--bin/update/release/info.pycbin0 -> 292 bytes
-rw-r--r--bin/update/release/platforms.py8
-rw-r--r--bin/update/release/platforms.pycbin0 -> 629 bytes
-rw-r--r--bin/update/release/versions.py2
-rw-r--r--bin/update/release/versions.pycbin0 -> 306 bytes
-rw-r--r--bin/update/util/__init__.py0
-rw-r--r--bin/update/util/__init__.pycbin0 -> 136 bytes
-rw-r--r--bin/update/util/algorithms.py10
-rw-r--r--bin/update/util/algorithms.pycbin0 -> 527 bytes
24 files changed, 1079 insertions, 0 deletions
diff --git a/bin/update/balrog-release-pusher.py b/bin/update/balrog-release-pusher.py
new file mode 100644
index 000000000000..648694cd0ea9
--- /dev/null
+++ b/bin/update/balrog-release-pusher.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import os
+from os import path
+import re
+import logging
+import sys
+
+# Use explicit version of python-requests
+sys.path.insert(0, path.join(path.dirname(__file__),
+ "../../lib/python/vendor/requests-2.7.0"))
+
+from balrog.submitter.cli import ReleaseCreatorV3, ReleaseCreatorV4, \
+ ReleasePusher
+from release.info import readReleaseConfig
+from util.retry import retry
+
+REQUIRED_CONFIG = ('appVersion', 'productName', 'version', 'enUSPlatforms',
+ 'baseTag', 'updateChannels', 'buildNumber', 'partialUpdates',
+ 'ftpServer', 'bouncerServer')
+
+def validate(options):
+ err = False
+ config = {}
+
+ if not path.exists(path.join('buildbot-configs', options.release_config)):
+ print "%s does not exist!" % options.release_config
+ sys.exit(1)
+
+ config = readReleaseConfig(path.join('buildbot-configs',
+ options.release_config))
+ for key in REQUIRED_CONFIG:
+ if key not in config:
+ err = True
+ print "Required item missing in config: %s" % key
+
+ if err:
+ sys.exit(1)
+ return config
+
+if __name__ == '__main__':
+
+ from optparse import OptionParser
+ parser = OptionParser()
+ parser.add_option("-p", "--build-properties", dest="build_properties")
+ parser.add_option("-b", "--buildbot-configs", dest="buildbot_configs",
+                      help="The place to clone buildbot-configs from")
+ parser.add_option("-r", "--release-config", dest="release_config")
+ parser.add_option("-a", "--api-root", dest="api_root")
+ parser.add_option("-c", "--credentials-file", dest="credentials_file")
+ parser.add_option("-s", "--schema", dest="schema_version",
+ help="blob schema version", type="int", default=4)
+ parser.add_option("-u", "--username", dest="username")
+ parser.add_option("-C", "--release-channel", dest="release_channel")
+ parser.add_option("-v", "--verbose", dest="verbose", action="store_true")
+ options, args = parser.parse_args()
+
+ logging_level = logging.INFO
+ if options.verbose:
+ logging_level = logging.DEBUG
+ logging.basicConfig(stream=sys.stdout, level=logging_level,
+ format="%(message)s")
+
+ for opt in ('build_properties', 'release_config', 'api_root', 'credentials_file', 'buildbot_configs', 'username', 'release_channel'):
+ if not getattr(options, opt):
+ print >>sys.stderr, "Required option %s not present" % opt
+ sys.exit(1)
+
+ if options.schema_version not in (3,4):
+ parser.error("Only schema_versions 3 & 4 supported.")
+
+ release_channel = options.release_channel
+ properties = json.load(open(options.build_properties))['properties']
+ releaseTag = properties['script_repo_revision']
+ hashType = properties['hashType']
+ retry(mercurial, args=(options.buildbot_configs, 'buildbot-configs'), kwargs=dict(revision=releaseTag))
+ release_config = validate(options)
+ channelInfo = release_config["updateChannels"][release_channel]
+
+ credentials = {}
+ execfile(options.credentials_file, credentials)
+ auth = (options.username, credentials['balrog_credentials'][options.username])
+ updateChannels = [
+ release_channel,
+ channelInfo['localTestChannel'],
+ channelInfo['cdnTestChannel']
+ ]
+
+ if options.schema_version == 3:
+ creator = ReleaseCreatorV3(options.api_root, auth)
+ else:
+ creator= ReleaseCreatorV4(options.api_root, auth)
+ partials = {}
+ for v in release_config['partialUpdates']:
+ if re.match(channelInfo.get("versionRegex", "^.*$"), v):
+ partials[v] = release_config["partialUpdates"][v]
+
+ creator.run(release_config['appVersion'], release_config['productName'].capitalize(),
+ release_config['version'], release_config['buildNumber'],
+ updateChannels, release_config['ftpServer'],
+ release_config['bouncerServer'], release_config['enUSPlatforms'],
+ hashType, openURL=release_config.get('openURL'),
+ partialUpdates=partials,
+ requiresMirrors=channelInfo.get("requiresMirrors", True))
+
+ testChannelRuleIds = []
+ for c in channelInfo["testChannels"].values():
+ testChannelRuleIds.append(c["ruleId"])
+ pusher = ReleasePusher(options.api_root, auth)
+ pusher.run(release_config['productName'].capitalize(), release_config['version'],
+ release_config['buildNumber'], testChannelRuleIds)
diff --git a/bin/update/balrog-release-shipper.py b/bin/update/balrog-release-shipper.py
new file mode 100644
index 000000000000..5ad3f05964fc
--- /dev/null
+++ b/bin/update/balrog-release-shipper.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import os
+from os import path
+import logging
+import sys
+
+from balrog.submitter.cli import ReleasePusher
+from release.info import readReleaseConfig
+from util.retry import retry
+from util.hg import mercurial, make_hg_url
+
+HG = "hg.mozilla.org"
+DEFAULT_BUILDBOT_CONFIGS_REPO = make_hg_url(HG, 'build/buildbot-configs')
+REQUIRED_CONFIG = ('productName', 'version', 'buildNumber', "updateChannels")
+
+def validate(options):
+ err = False
+ config = {}
+
+ if not path.exists(path.join('buildbot-configs', options.release_config)):
+ print "%s does not exist!" % options.release_config
+ sys.exit(1)
+
+ config = readReleaseConfig(path.join('buildbot-configs',
+ options.release_config))
+ for key in REQUIRED_CONFIG:
+ if key not in config:
+ err = True
+ print "Required item missing in config: %s" % key
+
+ if err:
+ sys.exit(1)
+ return config
+
+if __name__ == '__main__':
+
+ from optparse import OptionParser
+ parser = OptionParser()
+ parser.add_option("-p", "--build-properties", dest="build_properties")
+ parser.add_option("-b", "--buildbot-configs", dest="buildbot_configs",
+ help="The place to clone buildbot-configs from",
+ default=os.environ.get('BUILDBOT_CONFIGS_REPO',
+ DEFAULT_BUILDBOT_CONFIGS_REPO))
+ parser.add_option("-r", "--release-config", dest="release_config")
+ parser.add_option("-a", "--api-root", dest="api_root")
+ parser.add_option("-c", "--credentials-file", dest="credentials_file")
+ parser.add_option("-u", "--username", dest="username")
+ parser.add_option("-C", "--release-channel", dest="release_channel")
+ parser.add_option("-v", "--verbose", dest="verbose", action="store_true")
+ options, args = parser.parse_args()
+
+ logging_level = logging.INFO
+ if options.verbose:
+ logging_level = logging.DEBUG
+ logging.basicConfig(stream=sys.stdout, level=logging_level,
+ format="%(message)s")
+
+ for opt in ('build_properties', 'release_config', 'api_root', 'credentials_file', 'buildbot_configs', 'username', "release_channel"):
+ if not getattr(options, opt):
+ print >>sys.stderr, "Required option %s not present" % opt
+ sys.exit(1)
+
+ properties = json.load(open(options.build_properties))['properties']
+
+ if properties.get("shipit") != "shipit":
+ print >>sys.stderr, "Magic keyword not present in properties, bailing"
+ sys.exit(1)
+
+ release_channel = options.release_channel
+ releaseTag = properties['script_repo_revision']
+ retry(mercurial, args=(options.buildbot_configs, 'buildbot-configs'), kwargs=dict(revision=releaseTag))
+ release_config = validate(options)
+
+ credentials = {}
+ execfile(options.credentials_file, credentials)
+ auth = (options.username, credentials['balrog_credentials'][options.username])
+
+ ruleIds = [release_config["updateChannels"][release_channel]["ruleId"]]
+
+ pusher = ReleasePusher(options.api_root, auth)
+ pusher.run(release_config['productName'].capitalize(), release_config['version'],
+ release_config['buildNumber'], ruleIds)
diff --git a/bin/update/balrog-submitter.py b/bin/update/balrog-submitter.py
new file mode 100644
index 000000000000..ee9d3de84e3f
--- /dev/null
+++ b/bin/update/balrog-submitter.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+
+import json
+import os
+import logging
+import sys
+
+from balrog.submitter.cli import NightlySubmitterV3, ReleaseSubmitterV3, \
+ NightlySubmitterV4, ReleaseSubmitterV4
+
+if __name__ == '__main__':
+ from optparse import OptionParser
+ parser = OptionParser()
+ parser.add_option("-p", "--build-properties", dest="build_properties")
+ parser.add_option("-a", "--api-root", dest="api_root")
+ parser.add_option("-c", "--credentials-file", dest="credentials_file")
+ parser.add_option("-u", "--username", dest="username")
+ parser.add_option("-t", "--type", dest="type_", help="nightly or release", default="nightly")
+ parser.add_option("-s", "--schema", dest="schema_version",
+ help="blob schema version", type="int", default=4)
+ parser.add_option(
+ "-r", "--url-replacement", action="append", dest="url_replacements",
+ help="""
+Comma-separated pair of from/to string to be replaced in the final URL, e.g.
+--url-replacement ftp.mozilla.org,download.cdn.mozilla.net
+Valid for nightly updates only.
+""")
+ parser.add_option("-d", "--dummy", dest="dummy", action="store_true",
+ help="Add '-dummy' suffix to branch name")
+ parser.add_option("-v", "--verbose", dest="verbose", action="store_true")
+ options, args = parser.parse_args()
+
+ logging_level = logging.INFO
+ if options.verbose:
+ logging_level = logging.DEBUG
+ logging.basicConfig(stream=sys.stdout, level=logging_level,
+ format="%(message)s")
+
+ credentials = {}
+ execfile(options.credentials_file, credentials)
+ auth = (options.username, credentials['balrog_credentials'][options.username])
+ fp = open(options.build_properties)
+ bp = json.load(fp)
+ fp.close()
+
+ if options.schema_version not in (3, 4):
+ parser.error("Only schema_versions 3 and 4 supported.")
+ props = bp['properties']
+ locale = props.get('locale', 'en-US')
+ extVersion = props.get('extVersion', props['appVersion'])
+ url_replacements = []
+ if options.url_replacements:
+ for replacement in options.url_replacements:
+ from_, to = replacement.split(",")
+ url_replacements.append([from_, to])
+ if options.type_ == "nightly":
+ updateKwargs = {}
+
+ if options.schema_version == 3:
+ submitter = NightlySubmitterV3(options.api_root, auth,
+ options.dummy,
+ url_replacements=url_replacements)
+ else:
+ submitter = NightlySubmitterV4(options.api_root, auth,
+ options.dummy,
+ url_replacements=url_replacements)
+
+ updateKwargs["completeInfo"] = [{
+ 'size': props['completeMarSize'],
+ 'hash': props['completeMarHash'],
+ 'url': props['completeMarUrl'],
+ }]
+ if "partialInfo" in props:
+ updateKwargs["partialInfo"] = props["partialInfo"]
+
+ submitter.run(props['platform'], props['buildid'], props['appName'],
+ props['branch'], props['appVersion'], locale, props['hashType'],
+ extVersion, **updateKwargs)
+ elif options.type_ == "release":
+ updateKwargs = {}
+ if options.schema_version == 3:
+ submitter = ReleaseSubmitterV3(options.api_root, auth, options.dummy)
+ else:
+ submitter = ReleaseSubmitterV4(options.api_root, auth, options.dummy)
+
+ updateKwargs["completeInfo"] = [{
+ 'size': props['completeMarSize'],
+ 'hash': props['completeMarHash'],
+ }]
+ if "partialInfo" in props:
+ updateKwargs["partialInfo"] = props["partialInfo"]
+
+ submitter.run(props['platform'], props['appName'], props['appVersion'],
+ props['version'], props['build_number'], locale,
+ props['hashType'], extVersion, props['buildid'],
+ **updateKwargs)
+ else:
+ parser.error("Invalid value for --type")
diff --git a/bin/update/balrog/__init__.py b/bin/update/balrog/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/bin/update/balrog/__init__.py
diff --git a/bin/update/balrog/submitter/__init__.py b/bin/update/balrog/submitter/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/bin/update/balrog/submitter/__init__.py
diff --git a/bin/update/balrog/submitter/__init__.pyc b/bin/update/balrog/submitter/__init__.pyc
new file mode 100644
index 000000000000..2c61bff13654
--- /dev/null
+++ b/bin/update/balrog/submitter/__init__.pyc
Binary files differ
diff --git a/bin/update/balrog/submitter/api.py b/bin/update/balrog/submitter/api.py
new file mode 100644
index 000000000000..ea369a46f9ed
--- /dev/null
+++ b/bin/update/balrog/submitter/api.py
@@ -0,0 +1,229 @@
+import json
+import logging
+import requests
+import os
+import time
+
+CA_BUNDLE = os.path.join(os.path.dirname(__file__),
+ '../../../../misc/certs/ca-bundle.crt')
+
+
+def is_csrf_token_expired(token):
+ from datetime import datetime
+ expiry = token.split('##')[0]
+ if expiry <= datetime.now().strftime('%Y%m%d%H%M%S'):
+ return True
+ return False
+
+
+class API(object):
+ """A class that knows how to make requests to a Balrog server, including
+ pre-retrieving CSRF tokens and data versions.
+
+ url_template: The URL to submit to when request() is called. Standard
+ Python string interpolation can be used here in
+ combination with url_template_vars.
+ prerequest_url_template: Before submitting the real request, a HEAD
+ operation will be done on this URL. If the
+ HEAD request succeeds, it is expected that
+ there will be X-CSRF-Token and X-Data-Version
+ headers in the response. If the HEAD request
+ results in a 404, another HEAD request to
+ /csrf_token will be made in attempt to get a
+ CSRF Token. This URL can use string
+ interpolation the same way url_template can.
+ In some cases this may be the same as the
+ url_template.
+ """
+ verify = False
+ auth = None
+ url_template = None
+ prerequest_url_template = None
+ url_template_vars = None
+
+ def __init__(self, api_root='https://aus4-admin-dev.allizom.org/api',
+ auth=None, ca_certs=CA_BUNDLE, timeout=60,
+ raise_exceptions=True):
+ """ Creates an API object which wraps REST API of Balrog server.
+
+ api_root: API root URL of balrog server
+ auth : a tuple of (username, password) or None
+ ca_certs: CA bundle. It follows python-requests `verify' usage.
+ If set to False, no SSL verification is done.
+ If set to True, it tries to load a CA bundle from certifi
+ module.
+                     If set to string, python-requests uses it as a path to
+ CA bundle.
+ timeout : request timeout
+ raise_exceptions: controls exception handling of python-requests.
+ """
+ self.api_root = api_root.rstrip('/')
+ self.verify = ca_certs
+ assert isinstance(auth, tuple) or auth == None, \
+ "auth should be set to tuple or None"
+ self.auth = auth
+ self.timeout = timeout
+ self.raise_exceptions = raise_exceptions
+ self.session = requests.session()
+ self.csrf_token = None
+
+ def request(self, data=None, method='GET'):
+ url = self.api_root + self.url_template % self.url_template_vars
+ prerequest_url = self.api_root + \
+ self.prerequest_url_template % self.url_template_vars
+ # If we'll be modifying things, do a GET first to get a CSRF token
+ # and possibly a data_version.
+ if method != 'GET' and method != 'HEAD':
+ # Use the URL of the resource we're going to modify first,
+ # because we'll need a CSRF token, and maybe its data version.
+ try:
+ res = self.do_request(prerequest_url, None, 'HEAD')
+ # If a data_version was specified we shouldn't overwrite it
+ # because the caller may be acting on a modified version of
+ # a specific older version of the data.
+ if 'data_version' not in data:
+ data['data_version'] = res.headers['X-Data-Version']
+ # We may already have a non-expired CSRF token, but it's
+ # faster/easier just to set it again even if we do, since
+ # we've already made the request.
+ data['csrf_token'] = self.csrf_token = res.headers[
+ 'X-CSRF-Token']
+ except requests.HTTPError, e:
+ # However, if the resource doesn't exist yet we may as well
+ # not bother doing another request solely for a token unless
+ # we don't have a valid one already.
+ if e.response.status_code != 404:
+ raise
+ if not self.csrf_token or is_csrf_token_expired(self.csrf_token):
+ res = self.do_request(
+ self.api_root + '/csrf_token', None, 'HEAD')
+ data['csrf_token'] = self.csrf_token = res.headers[
+ 'X-CSRF-Token']
+
+ return self.do_request(url, data, method)
+
+ def do_request(self, url, data, method):
+ logging.debug('Balrog request to %s' % url)
+ if data is not None and 'csrf_token' in data:
+ sanitised_data = data.copy()
+ del sanitised_data['csrf_token']
+ logging.debug('Data sent: %s' % sanitised_data)
+ else:
+ logging.debug('Data sent: %s' % data)
+ headers = {'Accept-Encoding': 'application/json',
+ 'Accept': 'application/json'}
+ before = time.time()
+ req = self.session.request(
+ method=method, url=url, data=data, timeout=self.timeout,
+ verify=self.verify, auth=self.auth, headers=headers)
+ try:
+ if self.raise_exceptions:
+ req.raise_for_status()
+ return req
+ except requests.HTTPError, e:
+ logging.error('Caught HTTPError: %s' % e.response.content)
+ raise
+ finally:
+ stats = {
+ "timestamp": time.time(),
+ "method": method,
+ "url": url,
+ "status_code": req.status_code,
+ "elapsed_secs": time.time() - before,
+ }
+ logging.debug('REQUEST STATS: %s', json.dumps(stats))
+
+ def get_data(self):
+ resp = self.request()
+ return (json.loads(resp.content), resp.headers['X-Data-Version'])
+
+
+class Release(API):
+ url_template = '/releases/%(name)s'
+ prerequest_url_template = '/releases/%(name)s'
+
+ def __init__(self, name, **kwargs):
+ super(Release, self).__init__(**kwargs)
+ self.name = name
+ self.url_template_vars = dict(name=name)
+
+
+ def update_release(self, product, hashFunction, releaseData,
+ data_version=None, schemaVersion=None):
+ data = dict(name=self.name, product=product,
+ hashFunction=hashFunction, data=releaseData)
+ if data_version:
+ data['data_version'] = data_version
+ if schemaVersion:
+ data['schema_version'] = schemaVersion
+ return self.request(method='POST', data=data)
+
+
+class SingleLocale(API):
+ url_template = '/releases/%(name)s/builds/%(build_target)s/%(locale)s'
+ prerequest_url_template = '/releases/%(name)s'
+
+ def __init__(self, name, build_target, locale, **kwargs):
+ super(SingleLocale, self).__init__(**kwargs)
+ self.name = name
+ self.build_target = build_target
+ self.locale = locale
+ self.url_template_vars = dict(name=name, build_target=build_target,
+ locale=locale)
+ # keep a copy to be used in get_data()
+ self.release_kwargs = kwargs
+
+ def get_data(self):
+ data, data_version = {}, None
+ # If the locale-specific API end point returns 404, we have to use the
+ # top level blob to get the data version. Because this requires 2 not
+ # atomic HTTP requests, we start with the top level blob and use its
+ # data version.
+ top_level = Release(name=self.name, **self.release_kwargs)
+ # Use data version from the top level blob
+ try:
+ _, data_version = top_level.get_data()
+ except requests.HTTPError, e:
+ if e.response.status_code == 404:
+ # top level blob doesn't exist, assume there is no data
+ return data, data_version
+ else:
+ raise
+ # Got data version. Try to get data from the locale specific blob.
+ # Using data version from the top level blob prevents possible race
+ # conditions if another client updates the locale blob between the
+ # first request and the call below.
+ try:
+ data, _ = super(SingleLocale, self).get_data()
+ return data, data_version
+ except requests.HTTPError, e:
+ if e.response.status_code == 404:
+ # locale blob doesn't exist, no data
+ return data, data_version
+ else:
+ raise
+
+ def update_build(self, product, hashFunction, buildData,
+ alias=None, schemaVersion=None, data_version=None):
+ data = dict(product=product, data=buildData, hashFunction=hashFunction)
+ if alias:
+ data['alias'] = alias
+ if data_version:
+ data['data_version'] = data_version
+ if schemaVersion:
+ data['schema_version'] = schemaVersion
+
+ return self.request(method='PUT', data=data)
+
+
+class Rule(API):
+ url_template = '/rules/%(rule_id)s'
+ prerequest_url_template = '/rules/%(rule_id)s'
+
+ def __init__(self, rule_id, **kwargs):
+ super(Rule, self).__init__(**kwargs)
+ self.rule_id = rule_id
+ self.url_template_vars=dict(rule_id=rule_id)
+
+ def update_rule(self, **rule_data):
+ return self.request(method='POST', data=rule_data)
diff --git a/bin/update/balrog/submitter/api.pyc b/bin/update/balrog/submitter/api.pyc
new file mode 100644
index 000000000000..26428d299e0d
--- /dev/null
+++ b/bin/update/balrog/submitter/api.pyc
Binary files differ
diff --git a/bin/update/balrog/submitter/cli.py b/bin/update/balrog/submitter/cli.py
new file mode 100644
index 000000000000..d00296e51499
--- /dev/null
+++ b/bin/update/balrog/submitter/cli.py
@@ -0,0 +1,502 @@
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+from release.info import getProductDetails
+# from release.paths import makeCandidatesDir
+from release.platforms import buildbot2updatePlatforms, buildbot2bouncer, \
+ buildbot2ftp
+from release.versions import getPrettyVersion
+from balrog.submitter.api import Release, SingleLocale, Rule
+from balrog.submitter.updates import merge_partial_updates
+from util.algorithms import recursive_update
+# from util.retry import retry
+import logging
+from requests.exceptions import HTTPError
+
+log = logging.getLogger(__name__)
+
+
+def get_nightly_blob_name(productName, branch, build_type, suffix, dummy=False):
+ if dummy:
+ branch = '%s-dummy' % branch
+ return '%s-%s-%s-%s' % (productName, branch, build_type, suffix)
+
+
+def get_release_blob_name(productName, version, build_number, dummy=False):
+ name = '%s-%s-build%s' % (productName, version, build_number)
+ if dummy:
+ name += '-dummy'
+ return name
+
+
+class ReleaseCreatorBase(object):
+ def __init__(self, api_root, auth, dummy=False):
+ self.api_root = api_root
+ self.auth = auth
+ self.dummy = dummy
+
+ def generate_data(self, appVersion, productName, version, buildNumber,
+ updateChannels, ftpServer, bouncerServer,
+ enUSPlatforms, schemaVersion, openURL=None,
+ **updateKwargs):
+ assert schemaVersion in (3, 4), 'Unhandled schema version %s' % schemaVersion
+ data = {
+ 'detailsUrl': getProductDetails(productName.lower(), appVersion),
+ 'platforms': {},
+ 'fileUrls': {},
+ 'appVersion': appVersion,
+ 'platformVersion': appVersion,
+ 'displayVersion': getPrettyVersion(version)
+ }
+
+ actions = []
+ if openURL:
+ actions.append("showURL")
+ data["openURL"] = openURL
+
+ if actions:
+ data["actions"] = " ".join(actions)
+
+ fileUrls = self._getFileUrls(productName, version, buildNumber,
+ updateChannels, ftpServer,
+ bouncerServer, **updateKwargs)
+ if fileUrls:
+ data.update(fileUrls)
+
+ updateData = self._get_update_data(productName, version, **updateKwargs)
+ if updateData:
+ data.update(updateData)
+
+ for platform in enUSPlatforms:
+ updatePlatforms = buildbot2updatePlatforms(platform)
+ bouncerPlatform = buildbot2bouncer(platform)
+ ftpPlatform = buildbot2ftp(platform)
+ data['platforms'][updatePlatforms[0]] = {
+ 'OS_BOUNCER': bouncerPlatform,
+ 'OS_FTP': ftpPlatform
+ }
+ for aliasedPlatform in updatePlatforms[1:]:
+ data['platforms'][aliasedPlatform] = {
+ 'alias': updatePlatforms[0]
+ }
+
+ return data
+
+ def run(self, appVersion, productName, version, buildNumber,
+ updateChannels, ftpServer, bouncerServer,
+ enUSPlatforms, hashFunction, schemaVersion, openURL=None,
+ **updateKwargs):
+ data = self.generate_data(appVersion, productName, version,
+ buildNumber, updateChannels,
+ ftpServer, bouncerServer, enUSPlatforms,
+ schemaVersion, openURL, **updateKwargs)
+ name = get_release_blob_name(productName, version, buildNumber,
+ self.dummy)
+ api = Release(name=name, auth=self.auth, api_root=self.api_root)
+ try:
+ current_data, data_version = api.get_data()
+ except HTTPError, e:
+ if e.response.status_code == 404:
+ log.warning("Release blob doesn't exist, using empty data...")
+ current_data, data_version = {}, None
+ else:
+ raise
+
+ data = recursive_update(current_data, data)
+ api.update_release(product=productName,
+ hashFunction=hashFunction,
+ releaseData=json.dumps(data),
+ schemaVersion=schemaVersion,
+ data_version=data_version)
+
+
+class ReleaseCreatorV3(ReleaseCreatorBase):
+ def run(self, *args, **kwargs):
+ return ReleaseCreatorBase.run(self, *args, schemaVersion=3, **kwargs)
+
+ def _getFileUrls(self, productName, version, buildNumber, updateChannels,
+ ftpServer, bouncerServer, partialUpdates):
+ data = {}
+
+ for channel in updateChannels:
+ if channel in ('betatest', 'esrtest') or "localtest" in channel:
+ # TODO: moggi: what does this do?
+ # dir_ = makeCandidatesDir(productName.lower(), version,
+ # buildNumber, server=ftpServer, protocol='http')
+ # data["fileUrls"][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
+ pass
+ else:
+ url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
+ data["fileUrls"][channel] = url
+
+ return data
+
+ def _get_update_data(self, productName, version, partialUpdates):
+ data = {
+ "ftpFilenames": {
+ "completes": {
+ "*": "%s-%s.complete.mar" % (productName.lower(), version),
+ }
+ },
+ "bouncerProducts": {
+ "completes": {
+ "*": "%s-%s-complete" % (productName.lower(), version),
+ }
+ }
+ }
+
+ if partialUpdates:
+ data["ftpFilenames"]["partials"] = {}
+ data["bouncerProducts"]["partials"] = {}
+ for previousVersion, previousInfo in partialUpdates.iteritems():
+ from_ = get_release_blob_name(productName, previousVersion,
+ previousInfo["buildNumber"],
+ self.dummy)
+ filename = "%s-%s-%s.partial.mar" % (productName.lower(), previousVersion, version)
+ bouncerProduct = "%s-%s-partial-%s" % (productName.lower(), version, previousVersion)
+ data["ftpFilenames"]["partials"][from_] = filename
+ data["bouncerProducts"]["partials"][from_] = bouncerProduct
+
+ return data
+
+
+class ReleaseCreatorV4(ReleaseCreatorBase):
+ def run(self, *args, **kwargs):
+ return ReleaseCreatorBase.run(self, *args, schemaVersion=4, **kwargs)
+
+ # Replaced by _get_fileUrls
+ def _get_update_data(self, *args, **kwargs):
+ return None
+
+ def _getFileUrls(self, productName, version, buildNumber, updateChannels,
+ ftpServer, bouncerServer, partialUpdates,
+ requiresMirrors=True):
+ data = {"fileUrls": {}}
+
+ # "*" is for the default set of fileUrls, which generally points at
+ # bouncer. It's helpful to have this to reduce duplication between
+ # the live channel and the cdntest channel (which eliminates the
+ # possibility that those two channels serve different contents).
+ uniqueChannels = ["*"]
+ for c in updateChannels:
+ # localtest channels are different than the default because they
+ # point directly at FTP rather than Bouncer.
+ if "localtest" in c:
+ uniqueChannels.append(c)
+ # beta and beta-cdntest are special, but only if requiresMirrors is
+ # set to False. This is typically used when generating beta channel
+ # updates as part of RC builds, which get shipped prior to the
+ # release being pushed to mirrors. This is a bit of a hack.
+ if not requiresMirrors and c in ("beta", "beta-cdntest",
+ "beta-dev", "beta-dev-cdntest"):
+ uniqueChannels.append(c)
+
+ for channel in uniqueChannels:
+ data["fileUrls"][channel] = {
+ "completes": {}
+ }
+ if "localtest" in channel:
+ pass
+ # dir_ = makeCandidatesDir(productName.lower(), version,
+ # buildNumber, server=ftpServer,
+ # protocol='http')
+ # filename = "%s-%s.complete.mar" % (productName.lower(), version)
+ # data["fileUrls"][channel]["completes"]["*"] = "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
+ else:
+ # See comment above about these channels for explanation.
+ if not requiresMirrors and channel in ("beta", "beta-cdntest", "beta-dev", "beta-dev-cdntest"):
+ bouncerProduct = "%s-%sbuild%s-complete" % (productName.lower(), version, buildNumber)
+ else:
+ if productName.lower() == "fennec":
+ bouncerProduct = "%s-%s" % (productName.lower(), version)
+ else:
+ bouncerProduct = "%s-%s-complete" % (productName.lower(), version)
+ url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (bouncerServer, bouncerProduct)
+ data["fileUrls"][channel]["completes"]["*"] = url
+
+ if not partialUpdates:
+ return data
+
+ for channel in uniqueChannels:
+ data["fileUrls"][channel]["partials"] = {}
+ for previousVersion, previousInfo in partialUpdates.iteritems():
+ from_ = get_release_blob_name(productName, previousVersion,
+ previousInfo["buildNumber"],
+ self.dummy)
+ if "localtest" in channel:
+ pass
+ # dir_ = makeCandidatesDir(productName.lower(), version,
+ # buildNumber, server=ftpServer,
+ # protocol='http')
+ # filename = "%s-%s-%s.partial.mar" % (productName.lower(), previousVersion, version)
+ # data["fileUrls"][channel]["partials"][from_] = "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
+ else:
+ # See comment above about these channels for explanation.
+ if not requiresMirrors and channel in ("beta", "beta-cdntest", "beta-dev", "beta-dev-cdntest"):
+ bouncerProduct = "%s-%sbuild%s-partial-%sbuild%s" % (productName.lower(), version, buildNumber, previousVersion, previousInfo["buildNumber"])
+ else:
+ bouncerProduct = "%s-%s-partial-%s" % (productName.lower(), version, previousVersion)
+ url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (bouncerServer, bouncerProduct)
+ data["fileUrls"][channel]["partials"][from_] = url
+
+ return data
+
+
class NightlySubmitterBase(object):
    """Submits nightly build update metadata to a Balrog server.

    Concrete subclasses mix in MultipleUpdatesNightlyMixin (which provides
    _get_update_data) and pin a schema version via run().  For each locale
    this maintains two blobs: a dated one (keyed by buildID) and a "latest"
    one that is kept in sync with the newest dated blob.
    """
    # Build type used when composing nightly blob names.
    build_type = 'nightly'

    def __init__(self, api_root, auth, dummy=False, url_replacements=None):
        # api_root: base URL of the Balrog admin API.
        # auth: credentials passed through to the API wrapper.
        # dummy: if True, blob names are marked as dummy (test) blobs.
        # url_replacements: optional iterable of (from, to) string pairs
        #   applied to file URLs before submission.
        self.api_root = api_root
        self.auth = auth
        self.dummy = dummy
        self.url_replacements = url_replacements

    def _replace_canocical_url(self, url):
        # NOTE(review): "canocical" is a typo for "canonical"; name kept
        # as-is because the mixins below call it by this spelling.
        # Applies only the FIRST matching replacement pair, then returns.
        if self.url_replacements:
            for string_from, string_to in self.url_replacements:
                if string_from in url:
                    new_url = url.replace(string_from, string_to)
                    log.warning("Replacing %s with %s", url, new_url)
                    return new_url

        return url

    def run(self, platform, buildID, productName, branch, appVersion, locale,
            hashFunction, extVersion, schemaVersion, **updateKwargs):
        """Create/update the dated and "latest" nightly blobs for one locale.

        updateKwargs (completeInfo/partialInfo) are forwarded to the mixin's
        _get_update_data.  Raises AssertionError for unsupported schema
        versions.
        """
        assert schemaVersion in (3,4), 'Unhandled schema version %s' % schemaVersion
        targets = buildbot2updatePlatforms(platform)
        # First target is the primary build target; any further entries are
        # submitted as aliases of the same blob.
        build_target = targets[0]
        alias = None
        if len(targets) > 1:
            alias = targets[1:]
        data = {
            'buildID': buildID,
            'appVersion': appVersion,
            'platformVersion': extVersion,
            'displayVersion': appVersion,
        }

        # "completes"/"partials" sections come from the mixin.
        data.update(self._get_update_data(productName, branch, **updateKwargs))

        if platform == 'android-api-9':
            # Bug 1080749 - a hack to support api-9 and api-10+ split builds.
            # Like 1055305, this is a hack to support two builds with same build target that
            # require different release blobs and rules
            build_type = 'api-9-%s' % self.build_type
        else:
            build_type = self.build_type

        name = get_nightly_blob_name(productName, branch, build_type, buildID,
                                     self.dummy)
        api = SingleLocale(name=name, build_target=build_target, locale=locale,
                           auth=self.auth, api_root=self.api_root)

        # wrap operations into "atomic" functions that can be retried
        def update_dated():
            current_data, data_version = api.get_data()
            # If the partials are already a subset of the blob and the
            # complete MAR is the same, skip the submission
            skip_submission = bool(
                current_data and
                current_data.get("completes") == data.get("completes") and
                all(p in current_data.get("partials", [])
                    for p in data.get("partials", [])))
            if skip_submission:
                log.warn("Dated data didn't change, skipping update")
                return
            # explicitly pass data version
            api.update_build(
                product=productName,
                hashFunction=hashFunction,
                buildData=json.dumps(merge_partial_updates(current_data,
                                                           data)),
                alias=json.dumps(alias),
                schemaVersion=schemaVersion, data_version=data_version)

        # TODO: moggi: enable retry again
        # retry(update_dated, sleeptime=10)
        update_dated()

        latest = SingleLocale(
            api_root=self.api_root, auth=self.auth,
            name=get_nightly_blob_name(productName, branch, build_type,
                                       'latest', self.dummy),
            build_target=build_target, locale=locale)

        def update_latest():
            # copy everything over using target release's data version
            latest_data, latest_data_version = latest.get_data()
            source_data, _ = api.get_data()
            if source_data == latest_data:
                log.warn("Latest data didn't change, skipping update")
                return
            latest.update_build(
                product=productName,
                hashFunction=hashFunction, buildData=json.dumps(source_data),
                alias=json.dumps(alias), schemaVersion=schemaVersion,
                data_version=latest_data_version)

        # TODO: moggi: enable retry again
        # retry(update_latest, sleeptime=10)
        update_latest()
+
+
class MultipleUpdatesNightlyMixin(object):
    """Builds the "completes"/"partials" sections of a nightly blob.

    Expects the host class to provide build_type, dummy and
    _replace_canocical_url (NightlySubmitterBase does).
    """

    def _get_update_data(self, productName, branch, completeInfo=None,
                         partialInfo=None):
        """Return a dict with optional "completes" and "partials" lists.

        Each completeInfo/partialInfo entry is a dict with "size", "hash"
        and "url" keys; "from_buildid" is required for partials and optional
        for completes (absent means match-any, i.e. "*").
        """
        update_data = {}

        if completeInfo:
            completes = []
            for entry in completeInfo:
                if "from_buildid" in entry:
                    source = get_nightly_blob_name(productName, branch,
                                                   self.build_type,
                                                   entry["from_buildid"],
                                                   self.dummy)
                else:
                    source = "*"
                completes.append({
                    "from": source,
                    "filesize": entry["size"],
                    "hashValue": entry["hash"],
                    "fileUrl": self._replace_canocical_url(entry["url"]),
                })
            update_data["completes"] = completes

        if partialInfo:
            update_data["partials"] = [
                {
                    "from": get_nightly_blob_name(productName, branch,
                                                  self.build_type,
                                                  entry["from_buildid"],
                                                  self.dummy),
                    "filesize": entry["size"],
                    "hashValue": entry["hash"],
                    "fileUrl": self._replace_canocical_url(entry["url"]),
                }
                for entry in partialInfo
            ]

        return update_data
+
+
class NightlySubmitterV3(NightlySubmitterBase, MultipleUpdatesNightlyMixin):
    """Nightly submitter pinned to Balrog schema version 3."""

    def run(self, *args, **kwargs):
        return super(NightlySubmitterV3, self).run(
            *args, schemaVersion=3, **kwargs)
+
+
class NightlySubmitterV4(NightlySubmitterBase, MultipleUpdatesNightlyMixin):
    """Nightly submitter pinned to Balrog schema version 4."""

    def run(self, *args, **kwargs):
        return super(NightlySubmitterV4, self).run(
            *args, schemaVersion=4, **kwargs)
+
+
class ReleaseSubmitterBase(object):
    """Submits release (non-nightly) build update metadata to Balrog.

    Concrete subclasses mix in MultipleUpdatesReleaseMixin (which provides
    _get_update_data) and pin a schema version via run().
    """

    def __init__(self, api_root, auth, dummy=False):
        # api_root: base URL of the Balrog admin API.
        # auth: credentials passed through to the API wrapper.
        # dummy: if True, blob names are marked as dummy (test) blobs.
        self.api_root = api_root
        self.auth = auth
        self.dummy = dummy

    def run(self, platform, productName, appVersion, version, build_number, locale,
            hashFunction, extVersion, buildID, schemaVersion, **updateKwargs):
        """Create or update the release blob for one platform/locale.

        updateKwargs (completeInfo/partialInfo) are forwarded to the mixin's
        _get_update_data.  Raises AssertionError for unsupported schema
        versions.
        """
        assert schemaVersion in (3, 4), 'Unhandled schema version %s' % schemaVersion
        targets = buildbot2updatePlatforms(platform)
        # Some platforms may have alias', but those are set-up elsewhere
        # for release blobs.
        build_target = targets[0]

        name = get_release_blob_name(productName, version, build_number,
                                     self.dummy)
        data = {
            'buildID': buildID,
            'appVersion': appVersion,
            'platformVersion': extVersion,
            'displayVersion': getPrettyVersion(version)
        }

        # "completes"/"partials" sections come from the mixin.
        data.update(self._get_update_data(productName, version, build_number,
                                          **updateKwargs))

        api = SingleLocale(name=name, build_target=build_target, locale=locale,
                           auth=self.auth, api_root=self.api_root)
        # NOTE(review): schemaVersion is JSON-encoded here, while the nightly
        # submitter passes the plain int to update_build -- confirm the API
        # accepts both representations.
        schemaVersion = json.dumps(schemaVersion)
        # Merge with the existing blob (if any) so partials accumulate, and
        # pass the current data_version for optimistic concurrency control.
        current_data, data_version = api.get_data()
        api.update_build(
            data_version=data_version,
            product=productName, hashFunction=hashFunction,
            buildData=json.dumps(merge_partial_updates(current_data, data)),
            schemaVersion=schemaVersion)
+
+
class MultipleUpdatesReleaseMixin(object):
    """Builds the "completes"/"partials" sections of a release blob.

    Expects the host class to provide dummy (ReleaseSubmitterBase does).
    """

    def _get_update_data(self, productName, version, build_number,
                         completeInfo=None, partialInfo=None):
        """Return a dict with optional "completes" and "partials" lists.

        Each completeInfo/partialInfo entry is a dict with "size" and "hash"
        keys; "previousVersion"/"previousBuildNumber" identify the release
        the MAR updates from (required for partials, optional for completes,
        where their absence means match-any, i.e. "*").

        Bug fix: the completes branch previously built "from" out of the
        *current* version/build_number instead of the previous release's,
        so complete updates never matched the release they applied to.
        """
        data = {}

        if completeInfo:
            data["completes"] = []
            for info in completeInfo:
                if "previousVersion" in info:
                    # Reference the release this MAR updates FROM, mirroring
                    # the partials branch below (was: version/build_number).
                    from_ = get_release_blob_name(productName,
                                                  info["previousVersion"],
                                                  info["previousBuildNumber"],
                                                  self.dummy)
                else:
                    from_ = "*"
                data["completes"].append({
                    "from": from_,
                    "filesize": info["size"],
                    "hashValue": info["hash"],
                })
        if partialInfo:
            data["partials"] = []
            for info in partialInfo:
                data["partials"].append({
                    "from": get_release_blob_name(productName,
                                                  info["previousVersion"],
                                                  info["previousBuildNumber"],
                                                  self.dummy),
                    "filesize": info["size"],
                    "hashValue": info["hash"],
                })

        return data
+
+
class ReleaseSubmitterV3(ReleaseSubmitterBase, MultipleUpdatesReleaseMixin):
    """Release submitter pinned to Balrog schema version 3."""

    def run(self, *args, **kwargs):
        return super(ReleaseSubmitterV3, self).run(
            *args, schemaVersion=3, **kwargs)
+
+
class ReleaseSubmitterV4(ReleaseSubmitterBase, MultipleUpdatesReleaseMixin):
    """Release submitter pinned to Balrog schema version 4."""

    def run(self, *args, **kwargs):
        return super(ReleaseSubmitterV4, self).run(
            *args, schemaVersion=4, **kwargs)
+
+
class ReleasePusher(object):
    """Points a set of Balrog rules at a release blob."""

    def __init__(self, api_root, auth, dummy=False):
        # api_root: base URL of the Balrog admin API.
        # auth: credentials passed through to the API wrapper.
        # dummy: if True, blob names are marked as dummy (test) blobs.
        self.api_root = api_root
        self.auth = auth
        self.dummy = dummy

    def run(self, productName, version, build_number, rule_ids):
        """Update every rule in rule_ids to map to the release blob."""
        blob_name = get_release_blob_name(productName, version, build_number,
                                          self.dummy)
        for rule_id in rule_ids:
            rule = Rule(api_root=self.api_root, auth=self.auth,
                        rule_id=rule_id)
            rule.update_rule(mapping=blob_name)
+
+
class BlobTweaker(object):
    """Deep-merges extra data into an existing Balrog release blob."""

    def __init__(self, api_root, auth):
        # api_root: base URL of the Balrog admin API.
        # auth: credentials passed through to the API wrapper.
        self.api_root = api_root
        self.auth = auth

    def run(self, name, data):
        """Fetch blob `name`, recursively merge `data` into it, push back."""
        release_api = Release(name=name, auth=self.auth,
                              api_root=self.api_root)
        current_data, data_version = release_api.get_data()
        merged = recursive_update(current_data, data)
        # The product is the leading dash-separated component of the name.
        release_api.update_release(
            product=name.split('-')[0],
            hashFunction=merged['hashFunction'],
            releaseData=json.dumps(merged),
            data_version=data_version,
            schemaVersion=current_data['schema_version'])
+
diff --git a/bin/update/balrog/submitter/cli.pyc b/bin/update/balrog/submitter/cli.pyc
new file mode 100644
index 000000000000..792e28e85ff0
--- /dev/null
+++ b/bin/update/balrog/submitter/cli.pyc
Binary files differ
diff --git a/bin/update/balrog/submitter/updates.py b/bin/update/balrog/submitter/updates.py
new file mode 100644
index 000000000000..cb7154a96474
--- /dev/null
+++ b/bin/update/balrog/submitter/updates.py
@@ -0,0 +1,26 @@
+import site
+import os
+
+site.addsitedir(os.path.join(os.path.dirname(__file__), "..", ".."))
+import jsonmerge
+
+
def merge_partial_updates(base_obj, new_obj):
    """Merges 2 update objects, merging partials and replacing completes"""
    # "partials" entries are identified by their "from" field and merged
    # by id; "completes" are simply overwritten by the newer object, since
    # there is usually at most one complete MAR per blob.
    merge_schema = {
        "properties": {
            "partials": {
                "mergeStrategy": "arrayMergeById",
                "mergeOptions": {"idRef": "from"},
            },
            "completes": {"mergeStrategy": "overwrite"},
        }
    }
    return jsonmerge.Merger(schema=merge_schema).merge(base_obj, new_obj)
diff --git a/bin/update/balrog/submitter/updates.pyc b/bin/update/balrog/submitter/updates.pyc
new file mode 100644
index 000000000000..47da41d48a74
--- /dev/null
+++ b/bin/update/balrog/submitter/updates.pyc
Binary files differ
diff --git a/bin/update/release/__init__.py b/bin/update/release/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/bin/update/release/__init__.py
diff --git a/bin/update/release/__init__.pyc b/bin/update/release/__init__.pyc
new file mode 100644
index 000000000000..d046716333ec
--- /dev/null
+++ b/bin/update/release/__init__.pyc
Binary files differ
diff --git a/bin/update/release/info.py b/bin/update/release/info.py
new file mode 100644
index 000000000000..323609190286
--- /dev/null
+++ b/bin/update/release/info.py
@@ -0,0 +1,2 @@
def getProductDetails():
    """Stub of Mozilla's release-info helper; LibreOffice has no
    product-details service, so always return an empty URL string."""
    return ""
diff --git a/bin/update/release/info.pyc b/bin/update/release/info.pyc
new file mode 100644
index 000000000000..39e2294ad6d6
--- /dev/null
+++ b/bin/update/release/info.pyc
Binary files differ
diff --git a/bin/update/release/platforms.py b/bin/update/release/platforms.py
new file mode 100644
index 000000000000..a265f5a72926
--- /dev/null
+++ b/bin/update/release/platforms.py
@@ -0,0 +1,8 @@
def buildbot2bouncer():
    """Stub of Mozilla's buildbot-to-bouncer platform mapping; not used
    by the LibreOffice update flow, so it returns None."""
    return None
+
def buildbot2updatePlatforms(platform):
    """Return the update platform names for a buildbot platform name.

    Callers (the Balrog submitters) index the result as a sequence —
    targets[0] for the primary build target and targets[1:] for aliases —
    so this must return a list.  Returning the bare string (as before)
    made targets[0] the first *character* of the platform name rather
    than the platform itself; wrap it in a one-element list instead.
    """
    return [platform]
+
def buildbot2ftp():
    """Stub of Mozilla's buildbot-to-FTP platform mapping; not used by
    the LibreOffice update flow, so it returns None."""
    return None
diff --git a/bin/update/release/platforms.pyc b/bin/update/release/platforms.pyc
new file mode 100644
index 000000000000..c0bd9b056ec1
--- /dev/null
+++ b/bin/update/release/platforms.pyc
Binary files differ
diff --git a/bin/update/release/versions.py b/bin/update/release/versions.py
new file mode 100644
index 000000000000..5db137fd171e
--- /dev/null
+++ b/bin/update/release/versions.py
@@ -0,0 +1,2 @@
def getPrettyVersion(version):
    """Stub of Mozilla's version prettifier: LibreOffice version strings
    are already display-ready, so return the input unchanged."""
    return version
diff --git a/bin/update/release/versions.pyc b/bin/update/release/versions.pyc
new file mode 100644
index 000000000000..8b2afacda1e9
--- /dev/null
+++ b/bin/update/release/versions.pyc
Binary files differ
diff --git a/bin/update/util/__init__.py b/bin/update/util/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/bin/update/util/__init__.py
diff --git a/bin/update/util/__init__.pyc b/bin/update/util/__init__.pyc
new file mode 100644
index 000000000000..7e33d6798095
--- /dev/null
+++ b/bin/update/util/__init__.pyc
Binary files differ
diff --git a/bin/update/util/algorithms.py b/bin/update/util/algorithms.py
new file mode 100644
index 000000000000..5ebe648d5f00
--- /dev/null
+++ b/bin/update/util/algorithms.py
@@ -0,0 +1,10 @@
import collections

try:
    # Mapping lives in collections.abc on Python 3; the collections-level
    # alias was removed in Python 3.10.
    from collections.abc import Mapping
except ImportError:  # Python 2 fallback
    from collections import Mapping


def recursive_update(d, u):
    """Recursively merge mapping `u` into mapping `d`, in place.

    Nested mappings are merged key by key; any non-mapping value in `u`
    overwrites the corresponding key in `d`.  Returns the mutated `d`.
    Fix: the original used u.iteritems(), which does not exist on
    Python 3; .items() works on both Python 2 and 3.
    """
    for key, value in u.items():
        if isinstance(value, Mapping):
            d[key] = recursive_update(d.get(key, {}), value)
        else:
            d[key] = value
    return d
diff --git a/bin/update/util/algorithms.pyc b/bin/update/util/algorithms.pyc
new file mode 100644
index 000000000000..f365ae37f5bf
--- /dev/null
+++ b/bin/update/util/algorithms.pyc
Binary files differ