mirror of https://github.com/fork-maintainers/iceraven-browser synced 2024-11-05 21:20:45 +00:00

Bug 1808607 - part 6: Remove fenix taskcluster leftovers

(cherry picked from commit 1bf4a0f1fa68ce41c07fc3bcfe932834d48d2194)
Johan Lorenzo 2023-01-31 16:31:14 +01:00 committed by mergify[bot]
parent f4fe4a59bd
commit d2e5dd5f37
6 changed files with 0 additions and 641 deletions


@@ -1,27 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from importlib import import_module
def register(graph_config):
"""
Import all modules that are siblings of this one, triggering decorators in
the process.
"""
_import_modules(
[
"job",
"parameters",
"release_promotion",
"routes",
"target_tasks",
"worker_types",
]
)
def _import_modules(modules):
for module in modules:
import_module(".{}".format(module), package=__name__)
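The decorators in those sibling modules do the real registration as a side effect of being imported, which is why register() only needs import_module(). A toy sketch of that pattern (the names below are illustrative, not taskgraph's actual API):

_ACTIONS = {}

def register_callback_action_stub(name):
    # Toy decorator: records the decorated function in a module-level registry.
    def wrapper(func):
        _ACTIONS[name] = func
        return func
    return wrapper

@register_callback_action_stub("release-promotion")
def example_action():
    pass

# Importing the module that defines example_action is enough to populate
# _ACTIONS; no explicit call is required.
assert "release-promotion" in _ACTIONS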


@@ -1,202 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from mozilla_version.fenix import FenixVersion
from taskgraph.actions.registry import register_callback_action
from taskgraph.util.taskcluster import get_artifact
from taskgraph.taskgraph import TaskGraph
from taskgraph.decision import taskgraph_decision
from taskgraph.parameters import Parameters
from taskgraph.util.taskgraph import (
find_decision_task,
find_existing_tasks_from_previous_kinds,
)
RELEASE_PROMOTION_PROJECTS = (
"https://github.com/mozilla-mobile/fenix",
"https://github.com/mozilla-releng/staging-fenix",
)
def is_release_promotion_available(parameters):
return parameters["head_repository"] in RELEASE_PROMOTION_PROJECTS
@register_callback_action(
name="release-promotion",
title="Release Promotion",
symbol="${input.release_promotion_flavor}",
description="Promote a release.",
generic=False,
order=500,
context=[],
available=is_release_promotion_available,
schema=lambda graph_config: {
"type": "object",
"properties": {
"build_number": {
"type": "integer",
"default": 1,
"minimum": 1,
"title": "The release build number",
"description": (
"The release build number. Starts at 1 per "
"release version, and increments on rebuild."
),
},
"do_not_optimize": {
"type": "array",
"description": (
"Optional: a list of labels to avoid optimizing out "
"of the graph (to force a rerun of, say, "
"funsize docker-image tasks)."
),
"items": {
"type": "string",
},
},
"revision": {
"type": "string",
"title": "Optional: revision to ship",
"description": ("Optional: the revision to ship."),
},
"release_promotion_flavor": {
"type": "string",
"description": "The flavor of release promotion to perform.",
"default": "build",
"enum": sorted(graph_config["release-promotion"]["flavors"].keys()),
},
"rebuild_kinds": {
"type": "array",
"description": (
"Optional: an array of kinds to ignore from the previous "
"graph(s)."
),
"items": {
"type": "string",
},
},
"previous_graph_ids": {
"type": "array",
"description": (
"Optional: an array of taskIds of decision or action "
"tasks from the previous graph(s) to use to populate "
"our `previous_graph_kinds`."
),
"items": {
"type": "string",
},
},
"version": {
"type": "string",
"description": (
"Optional: override the version for release promotion. "
"Occasionally we'll land a taskgraph fix in a later "
"commit, but want to act on a build from a previous "
"commit. If a version bump has landed in the meantime, "
"relying on the in-tree version will break things."
),
"default": "",
},
"next_version": {
"type": "string",
"description": "Next version.",
"default": "",
},
},
"required": [
"release_promotion_flavor",
"version",
"build_number",
"next_version",
],
},
)
def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
release_promotion_flavor = input["release_promotion_flavor"]
promotion_config = graph_config["release-promotion"]["flavors"][
release_promotion_flavor
]
target_tasks_method = promotion_config["target-tasks-method"].format(
project=parameters["project"]
)
rebuild_kinds = input.get("rebuild_kinds") or promotion_config.get(
"rebuild-kinds", []
)
do_not_optimize = input.get("do_not_optimize") or promotion_config.get(
"do-not-optimize", []
)
# make parameters read-write
parameters = dict(parameters)
# Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
previous_graph_ids = input.get("previous_graph_ids")
if not previous_graph_ids:
previous_graph_ids = [find_decision_task(parameters, graph_config)]
# Download parameters from the first decision task
parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
# Download and combine full task graphs from each of the previous_graph_ids.
# Sometimes previous relpro action tasks will add tasks, like partials,
# that didn't exist in the first full_task_graph, so combining them is
# important. The rightmost graph should take precedence in the case of
# conflicts.
combined_full_task_graph = {}
for graph_id in previous_graph_ids:
full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
combined_full_task_graph.update(full_task_graph)
_, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
combined_full_task_graph, previous_graph_ids, rebuild_kinds
)
parameters["do_not_optimize"] = do_not_optimize
parameters["target_tasks_method"] = target_tasks_method
parameters["build_number"] = int(input["build_number"])
# When doing staging releases on try, we still want to re-use tasks from
# previous graphs.
parameters["optimize_target_tasks"] = True
parameters["shipping_phase"] = input["release_promotion_flavor"]
version_in_file = read_version_file()
parameters["version"] = (
input["version"] if input.get("version") else read_version_file()
)
version_string = parameters["version"]
if version_string != version_in_file:
raise ValueError(
"Version given in tag ({}) does not match the one in version.txt ({})".format(
version_string, version_in_file
)
)
parameters["head_tag"] = "v{}".format(version_string)
parameters["next_version"] = input["next_version"]
version = FenixVersion.parse(version_string)
if version.is_beta:
release_type = "beta"
elif version.is_release:
release_type = "release"
elif version.is_release_candidate:
release_type = "release"
else:
raise ValueError("Unsupported version type: {}".format(version.version_type))
parameters["release_type"] = release_type
parameters["tasks_for"] = "action"
parameters["pull_request_number"] = None
# make parameters read-only
parameters = Parameters(**parameters)
taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)
def read_version_file():
with open(os.path.join(os.path.dirname(__file__), "..", "..", "version.txt")) as f:
return f.read().strip()
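For illustration, an action input satisfying the schema above might look like the following; every value is made up, and the flavor must be one of the flavors defined in the graph config:

example_input = {
    "release_promotion_flavor": "ship",  # illustrative; must be a key of graph_config["release-promotion"]["flavors"]
    "version": "110.1.0",                # must match version.txt, or the action raises ValueError
    "next_version": "110.1.1",
    "build_number": 1,
    # Optional: reuse tasks from specific earlier graphs instead of the
    # decision task found for this revision.
    "previous_graph_ids": [],
}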


@@ -1,39 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
from taskgraph.transforms.task import index_builder
# Please ping the performance testing team (whawkins at the moment) if these routes change.
# In the future, notifying consumers may be easier (https://bugzilla.mozilla.org/show_bug.cgi?id=1548810), but
# we need to remember to tell users for the time being
SIGNING_ROUTE_TEMPLATES = [
"index.{trust-domain}.v2.{project}.{variant}.latest.{abi}",
"index.{trust-domain}.v2.{project}.{variant}.{build_date}.revision.{head_rev}.{abi}",
"index.{trust-domain}.v2.{project}.{variant}.{build_date}.latest.{abi}",
"index.{trust-domain}.v2.{project}.{variant}.revision.{head_rev}.{abi}",
]
@index_builder("signing")
def add_signing_indexes(config, task):
if config.params["level"] != "3":
return task
subs = config.params.copy()
subs["build_date"] = time.strftime(
"%Y.%m.%d", time.gmtime(config.params["build_date"])
)
subs["trust-domain"] = config.graph_config["trust-domain"]
subs["variant"] = task["attributes"]["build-type"]
unique_routes = set()
for tpl in SIGNING_ROUTE_TEMPLATES:
for abi in task["attributes"]["apks"].keys():
subs["abi"] = abi
unique_routes.add(tpl.format(**subs))
task.setdefault("routes", sorted(list(unique_routes)))
return task
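As a worked example of the substitution above, with made-up parameter values the first template expands as follows (str.format resolves the hyphenated field names as plain keyword-dictionary lookups):

subs = {
    "trust-domain": "mobile",   # illustrative values throughout
    "project": "fenix",
    "variant": "nightly",
    "build_date": "2023.01.31",
    "head_rev": "0123abcd",
    "abi": "arm64-v8a",
}
route = "index.{trust-domain}.v2.{project}.{variant}.latest.{abi}".format(**subs)
# route == "index.mobile.v2.fenix.nightly.latest.arm64-v8a"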


@@ -1,119 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from redo import retry
from taskgraph.target_tasks import _target_task
from taskgraph.util.taskcluster import find_task_id
def index_exists(index_path, reason=""):
print(f"Looking for existing index {index_path} {reason}...")
try:
task_id = find_task_id(index_path)
print(f"Index {index_path} exists: taskId {task_id}")
return True
except KeyError:
print(f"Index {index_path} doesn't exist.")
return False
@_target_task("promote")
def target_tasks_promote(full_task_graph, parameters, graph_config):
def filter(task, parameters):
if (
task.attributes.get("release-type") == parameters["release_type"]
and task.attributes.get("shipping_phase") == "promote"
):
return True
return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
@_target_task("ship")
def target_tasks_ship(full_task_graph, parameters, graph_config):
filtered_for_candidates = target_tasks_promote(
full_task_graph,
parameters,
graph_config,
)
def filter(task, parameters):
# Include promotion tasks; these will be optimized out
if task.label in filtered_for_candidates:
return True
if (
task.attributes.get("release-type") == parameters["release_type"]
and task.attributes.get("shipping_phase") == "ship"
):
return True
return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
@_target_task("nightly")
def target_tasks_nightly(full_task_graph, parameters, graph_config):
"""Select the set of tasks required for a nightly build."""
def filter(task, parameters):
return task.attributes.get("nightly", False)
index_path = (
f"{graph_config['trust-domain']}.v2.{parameters['project']}.branch."
f"{parameters['head_ref']}.revision.{parameters['head_rev']}.taskgraph.decision-nightly"
)
if os.environ.get("MOZ_AUTOMATION") and retry(
index_exists,
args=(index_path,),
kwargs={
"reason": "to avoid triggering multiple nightlies off the same revision",
},
):
return []
return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
@_target_task("nightly-test")
def target_tasks_nightly_test(full_task_graph, parameters, graph_config):
"""Select the set of tasks required for a nightly build."""
def filter(task, parameters):
return task.attributes.get("nightly-test", False)
return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
def _filter_fennec(fennec_type, task, parameters):
return task.attributes.get("build-type", "") == "fennec-{}".format(fennec_type)
@_target_task("fennec-production")
def target_tasks_fennec_nightly(full_task_graph, parameters, graph_config):
"""Select the set of tasks required for a production build signed with the fennec key."""
return [
l
for l, t in full_task_graph.tasks.items()
if _filter_fennec("production", t, parameters)
]
@_target_task("screenshots")
def target_tasks_screenshots(full_task_graph, parameters, graph_config):
"""Select the set of tasks required to generate screenshots on a real device."""
def filter(task, parameters):
return task.attributes.get("screenshots", False)
return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
@_target_task("legacy_api_ui_tests")
def target_tasks_legacy_api_ui_tests(full_task_graph, parameters, graph_config):
"""Select the set of tasks required to run select UI tests on other API."""
def filter(task, parameters):
return task.attributes.get("legacy", False)
return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
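As a concrete illustration of the nightly de-duplication check above, with made-up parameter values the index path it probes would be:

# Illustrative values only; trust-domain, project, head_ref and head_rev are assumptions.
trust_domain, project = "mobile", "fenix"
head_ref, head_rev = "main", "0123abcd"
index_path = (
    f"{trust_domain}.v2.{project}.branch."
    f"{head_ref}.revision.{head_rev}.taskgraph.decision-nightly"
)
# -> "mobile.v2.fenix.branch.main.revision.0123abcd.taskgraph.decision-nightly"
# If a task is already indexed there, the nightly target set comes back empty.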


@@ -1,254 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from voluptuous import Any, Required, Optional
from taskgraph.util.schema import taskref_or_string
from taskgraph.transforms.task import payload_builder
@payload_builder(
"scriptworker-signing",
schema={
# the maximum time to run, in seconds
Required("max-run-time"): int,
Required("signing-type"): str,
# list of artifact URLs for the artifacts that should be signed
Required("upstream-artifacts"): [
{
# taskId of the task with the artifact
Required("taskId"): taskref_or_string,
# type of signing task (for CoT)
Required("taskType"): str,
# Paths to the artifacts to sign
Required("paths"): [str],
# Signing formats to use on each of the paths
Required("formats"): [str],
}
],
},
)
def build_scriptworker_signing_payload(config, task, task_def):
worker = task["worker"]
task_def["tags"]["worker-implementation"] = "scriptworker"
task_def["payload"] = {
"maxRunTime": worker["max-run-time"],
"upstreamArtifacts": worker["upstream-artifacts"],
}
formats = set()
for artifacts in worker["upstream-artifacts"]:
formats.update(artifacts["formats"])
scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
task_def["scopes"].append(
"{}:signing:cert:{}".format(scope_prefix, worker["signing-type"])
)
task_def["scopes"].extend(
[
"{}:signing:format:{}".format(scope_prefix, format)
for format in sorted(formats)
]
)
@payload_builder(
"scriptworker-beetmover",
schema={
Required("action"): str,
Required("version"): str,
Required("artifact-map"): [
{
Required("paths"): {
Any(str): {
Required("destinations"): [str],
},
},
Required("taskId"): taskref_or_string,
}
],
Required("beetmover-application-name"): str,
Required("bucket"): str,
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): str,
Required("paths"): [str],
}
],
},
)
def build_scriptworker_beetmover_payload(config, task, task_def):
worker = task["worker"]
task_def["tags"]["worker-implementation"] = "scriptworker"
# Needed by beetmover-scriptworker
for map_ in worker["artifact-map"]:
map_["locale"] = "multi"
for path_config in map_["paths"].values():
path_config["checksums_path"] = ""
task_def["payload"] = {
"artifactMap": worker["artifact-map"],
"releaseProperties": {"appName": worker.pop("beetmover-application-name")},
"upstreamArtifacts": worker["upstream-artifacts"],
"version": worker["version"],
}
scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
task_def["scopes"].extend(
[
"{}:beetmover:action:{}".format(scope_prefix, worker["action"]),
"{}:beetmover:bucket:{}".format(scope_prefix, worker["bucket"]),
]
)
@payload_builder(
"scriptworker-pushapk",
schema={
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): str,
Required("paths"): [str],
}
],
Required("certificate-alias"): str,
Required("channel"): str,
Required("commit"): bool,
Required("product"): str,
Required("dep"): bool,
Optional("google-play-track"): str,
},
)
def build_push_apk_payload(config, task, task_def):
worker = task["worker"]
task_def["tags"]["worker-implementation"] = "scriptworker"
task_def["payload"] = {
"certificate_alias": worker["certificate-alias"],
"channel": worker["channel"],
"commit": worker["commit"],
"upstreamArtifacts": worker["upstream-artifacts"],
}
if worker.get("google-play-track"):
task_def["payload"]["google_play_track"] = worker["google-play-track"]
scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
task_def["scopes"].append(
"{}:googleplay:product:{}{}".format(
scope_prefix, worker["product"], ":dep" if worker["dep"] else ""
)
)
@payload_builder(
"scriptworker-shipit",
schema={
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): str,
Required("paths"): [str],
}
],
Required("release-name"): str,
},
)
def build_shipit_payload(config, task, task_def):
worker = task["worker"]
task_def["tags"]["worker-implementation"] = "scriptworker"
task_def["payload"] = {"release_name": worker["release-name"]}
@payload_builder(
"scriptworker-github",
schema={
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): str,
Required("paths"): [str],
}
],
Required("artifact-map"): [object],
Required("action"): str,
Required("git-tag"): str,
Required("git-revision"): str,
Required("github-project"): str,
Required("is-prerelease"): bool,
Required("release-name"): str,
},
)
def build_github_release_payload(config, task, task_def):
worker = task["worker"]
task_def["tags"]["worker-implementation"] = "scriptworker"
task_def["payload"] = {
"artifactMap": worker["artifact-map"],
"gitTag": worker["git-tag"],
"gitRevision": worker["git-revision"],
"isPrerelease": worker["is-prerelease"],
"releaseName": worker["release-name"],
"upstreamArtifacts": worker["upstream-artifacts"],
}
scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
task_def["scopes"].extend(
[
"{}:github:project:{}".format(scope_prefix, worker["github-project"]),
"{}:github:action:{}".format(scope_prefix, worker["action"]),
]
)
@payload_builder(
"scriptworker-tree",
schema={
Optional("upstream-artifacts"): [
{
Optional("taskId"): taskref_or_string,
Optional("taskType"): str,
Optional("paths"): [str],
}
],
Required("bump"): bool,
Optional("bump-files"): [str],
Optional("push"): bool,
Optional("branch"): str,
},
)
def build_version_bump_payload(config, task, task_def):
worker = task["worker"]
task_def["tags"]["worker-implementation"] = "scriptworker"
task_def["payload"] = {"actions": []}
actions = task_def["payload"]["actions"]
if worker["bump"]:
if not worker["bump-files"]:
raise Exception("Version Bump requested without bump-files")
bump_info = {}
bump_info["next_version"] = config.params["next_version"]
bump_info["files"] = worker["bump-files"]
task_def["payload"]["version_bump_info"] = bump_info
actions.append("version_bump")
if worker["push"]:
task_def["payload"]["push"] = True
if worker.get("force-dry-run"):
task_def["payload"]["dry_run"] = True
if worker.get("branch"):
task_def["payload"]["branch"] = worker["branch"]