Source code

Revision control

Other Tools

1
# This Source Code Form is subject to the terms of the Mozilla Public
2
# License, v. 2.0. If a copy of the MPL was not distributed with this
3
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
4
5
from __future__ import absolute_import, print_function, unicode_literals
6
7
import os
8
import re
9
10
from collections import deque
11
from six import text_type
12
import taskgraph
13
from taskgraph.transforms.base import TransformSequence
14
from taskgraph.transforms.task import _run_task_suffix
15
from .. import GECKO
16
from taskgraph.util.docker import (
17
generate_context_hash,
18
)
19
from taskgraph.util.taskcluster import get_root_url
20
from taskgraph.util.schema import (
21
Schema,
22
)
23
from voluptuous import (
24
Optional,
25
Required,
26
)
27
from .task import task_description_schema
28
29
# Matches a full 64-character lowercase hex string (a sha256 digest).
DIGEST_RE = re.compile(r'^[0-9a-f]{64}$')
30
31
# Transform sequence for this kind; the schema validation and the
# fill_template transform below are registered against it.
transforms = TransformSequence()
32
33
# Schema for the docker-image definitions in this kind's configuration;
# applied to every incoming task via transforms.add_validate below.
docker_image_schema = Schema({
    # Name of the docker image.
    Required('name'): text_type,

    # Name of the parent docker image.
    Optional('parent'): text_type,

    # Treeherder symbol.
    Required('symbol'): text_type,

    # relative path (from config.path) to the file the docker image was defined
    # in.
    Optional('job-from'): text_type,

    # Arguments to use for the Dockerfile.
    Optional('args'): {text_type: text_type},

    # Name of the docker image definition under taskcluster/docker, when
    # different from the docker image name.
    Optional('definition'): text_type,

    # List of package tasks this docker image depends on.
    Optional('packages'): [text_type],

    Optional(
        "index",
        description="information for indexing this build so its artifacts can be discovered",
    ): task_description_schema['index'],

    Optional(
        "cache",
        description="Whether this image should be cached based on inputs.",
    ): bool,
})


transforms.add_validate(docker_image_schema)
70
71
72
def order_image_tasks(config, tasks):
    """Iterate image tasks in an order where parent images come first.

    Tasks whose ``parent`` has not been emitted yet are deferred (pushed to
    the back of the queue) until the parent has been yielded.

    Raises an Exception if a task names a parent that is not among *tasks*,
    or if the parent relationships form a cycle — previously a cycle (or a
    self-parent) made this loop spin forever, re-queueing the same blocked
    tasks without ever emitting one.
    """
    pending = deque(tasks)
    task_names = {task['name'] for task in pending}
    emitted = set()
    # Count consecutive deferrals; once every pending task has been deferred
    # without any emission in between, no further progress is possible.
    deferred = 0
    while pending:
        task = pending.popleft()
        parent = task.get('parent')
        if parent and parent not in emitted:
            if parent not in task_names:
                raise Exception('Missing parent image for {}-{}: {}'.format(
                    config.kind, task['name'], parent))
            pending.append(task)
            deferred += 1
            if deferred >= len(pending):
                raise Exception(
                    'Circular parent relationship among docker images: {}'.format(
                        ', '.join(sorted(t['name'] for t in pending))))
            continue
        emitted.add(task['name'])
        deferred = 0
        yield task
91
92
93
@transforms.add
def fill_template(config, tasks):
    """Turn validated docker-image definitions into full task descriptions.

    For each image (parents first, via order_image_tasks): validate its
    package dependencies, compute the docker-context hash, and yield a
    task description for the image-build task, including caching digest
    data, dependencies on parent-image and package tasks, and the
    docker-worker payload details.
    """
    # Names of available package tasks, derived by stripping the
    # 'packages-' prefix from kind-dependency labels; used to validate
    # each image's 'packages' list below.
    available_packages = set()
    for task in config.kind_dependencies_tasks:
        if task.kind != 'packages':
            continue
        name = task.label.replace('packages-', '')
        available_packages.add(name)

    # image name -> context hash, so a child image can embed its parent's
    # hash in DOCKER_IMAGE_PARENT (parents are always processed first).
    context_hashes = {}

    for task in order_image_tasks(config, tasks):
        image_name = task.pop('name')
        job_symbol = task.pop('symbol')
        args = task.pop('args', {})
        definition = task.pop('definition', image_name)
        packages = task.pop('packages', [])
        parent = task.pop('parent', None)

        for p in packages:
            if p not in available_packages:
                raise Exception('Missing package job for {}-{}: {}'.format(
                    config.kind, image_name, p))

        # Generating the context hash relies on arguments being set, so we
        # set this now, although it's not the final value (it's a
        # task-reference value, see further below). We add the package routes
        # containing a hash to get the overall docker image hash, so changes
        # to packages will be reflected in the docker image hash.
        args['DOCKER_IMAGE_PACKAGES'] = ' '.join('<{}>'.format(p)
                                                 for p in packages)
        if parent:
            # order_image_tasks guarantees the parent was already processed,
            # so its context hash is available here.
            args['DOCKER_IMAGE_PARENT'] = '{}:{}'.format(parent, context_hashes[parent])

        args['TASKCLUSTER_ROOT_URL'] = get_root_url(False)

        if not taskgraph.fast:
            context_path = os.path.join('taskcluster', 'docker', definition)
            context_hash = generate_context_hash(
                GECKO, context_path, image_name, args)
        else:
            # NOTE(review): this placeholder is 40 chars, not the 64 of a
            # real context hash. Harmless for caching — 'cache' is skipped
            # under taskgraph.fast below — but confirm nothing else checks
            # this value against DIGEST_RE.
            context_hash = '0'*40
        digest_data = [context_hash]
        context_hashes[image_name] = context_hash

        description = 'Build the docker image {} for use by dependent tasks'.format(
            image_name)

        # Adjust the zstandard compression level based on the execution level.
        # We use faster compression for level 1 because we care more about
        # end-to-end times. We use slower/better compression for other levels
        # because images are read more often and it is worth the trade-off to
        # burn more CPU once to reduce image size.
        zstd_level = '3' if int(config.params['level']) == 1 else '10'

        # include some information that is useful in reconstructing this task
        # from JSON
        taskdesc = {
            'label': 'build-docker-image-' + image_name,
            'description': description,
            'attributes': {'image_name': image_name},
            # Try pushes are short-lived; other trees keep images longer.
            'expires-after': '28 days' if config.params.is_try() else '1 year',
            'scopes': [
                'secrets:get:project/taskcluster/gecko/hgfingerprint',
                'secrets:get:project/taskcluster/gecko/hgmointernal',
            ],
            'treeherder': {
                'symbol': job_symbol,
                'platform': 'taskcluster-images/opt',
                'kind': 'other',
                'tier': 1,
            },
            'run-on-projects': [],
            'worker-type': 'images',
            'worker': {
                'implementation': 'docker-worker',
                'os': 'linux',
                'artifacts': [{
                    'type': 'file',
                    'path': '/builds/worker/workspace/artifacts/image.tar.zst',
                    'name': 'public/image.tar.zst',
                }],
                'env': {
                    'HG_STORE_PATH': '/builds/worker/checkouts/hg-store',
                    'HASH': context_hash,
                    'PROJECT': config.params['project'],
                    'IMAGE_NAME': image_name,
                    'DOCKER_IMAGE_ZSTD_LEVEL': zstd_level,
                    'GECKO_BASE_REPOSITORY': config.params['base_repository'],
                    'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
                    'GECKO_HEAD_REV': config.params['head_rev'],
                },
                'chain-of-trust': True,
                'docker-in-docker': True,
                'taskcluster-proxy': True,
                'max-run-time': 7200,
                # Retry on apt-get errors.
                'retry-exit-status': [100],
            },
        }
        # Retry for 'funsize-update-generator' if exit status code is -1
        if image_name in ['funsize-update-generator']:
            taskdesc['worker']['retry-exit-status'] = [-1]

        worker = taskdesc['worker']

        # We use the in-tree image_builder image to build docker images, but
        # that can't be used to build the image_builder image itself,
        # obviously. So we fall back to an image on docker hub, identified
        # by hash. After the image-builder image is updated, it's best to push
        # and update this hash as well, to keep image-builder builds up to date.
        if image_name == 'image_builder':
            # NOTE(review): 'hash' shadows the builtin of the same name;
            # harmless in this short scope.
            hash = 'sha256:c6622fd3e5794842ad83d129850330b26e6ba671e39c58ee288a616a3a1c4c73'
            worker['docker-image'] = 'taskcluster/image_builder@' + hash
            # Keep in sync with the Dockerfile used to generate the
            # docker image whose digest is referenced above.
            worker['volumes'] = [
                '/builds/worker/checkouts',
                '/builds/worker/workspace',
            ]
            cache_name = 'imagebuilder-v1'
        else:
            worker['docker-image'] = {'in-tree': 'image_builder'}
            cache_name = 'imagebuilder-sparse-{}'.format(_run_task_suffix())
            # Force images built against the in-tree image builder to
            # have a different digest by adding a fixed string to the
            # hashed data.
            # Append to this data whenever the image builder's output behavior
            # is changed, in order to force all downstream images to be rebuilt and
            # cached distinctly.
            digest_data.append('image_builder')
            # Updated for squashing images in Bug 1527394
            digest_data.append('squashing layers')

        worker['caches'] = [{
            'type': 'persistent',
            'name': cache_name,
            'mount-point': '/builds/worker/checkouts',
        }]

        # Dockerfile args flow into the worker environment; the packages
        # list is a task-reference value (contains <task> placeholders).
        for k, v in args.items():
            if k == 'DOCKER_IMAGE_PACKAGES':
                worker['env'][k] = {'task-reference': v}
            else:
                worker['env'][k] = v

        if packages:
            deps = taskdesc.setdefault('dependencies', {})
            for p in sorted(packages):
                deps[p] = 'packages-{}'.format(p)

        if parent:
            deps = taskdesc.setdefault('dependencies', {})
            deps[parent] = 'build-docker-image-{}'.format(parent)
            worker['env']['DOCKER_IMAGE_PARENT_TASK'] = {
                'task-reference': '<{}>'.format(parent),
            }
        if 'index' in task:
            taskdesc['index'] = task['index']

        # Images are cached by default; caching is disabled entirely when
        # taskgraph.fast is set (the placeholder hash above is not valid
        # digest data).
        if task.get('cache', True) and not taskgraph.fast:
            taskdesc['cache'] = {
                'type': 'docker-images.v2',
                'name': image_name,
                'digest-data': digest_data,
            }

        yield taskdesc