mirror of
https://chromium.googlesource.com/crosvm/crosvm
synced 2024-10-23 04:46:29 +00:00
infra: Add recipes and example builder
Following the onboarding instructions. If everything works we should have a builder at: https://ci.chromium.org/p/crosvm/builders/ci/Example%20Builder BUG=chromium:1300370 TEST=None Change-Id: I98eeaad6ccffd228fdee116e664c9d2760708e24 Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/crosvm/+/3500817 Reviewed-by: Dmitry Torokhov <dtor@chromium.org> Tested-by: kokoro <noreply+kokoro@google.com>
This commit is contained in:
parent
33a368ffaf
commit
793004384d
12 changed files with 351 additions and 3 deletions
1
infra/.gitignore
vendored
Normal file
1
infra/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
.recipe_deps
|
9
infra/README.md
Normal file
9
infra/README.md
Normal file
|
@ -0,0 +1,9 @@
|
|||
# WIP Luci Infrastructure
|
||||
|
||||
This directory contains the configuration and build recipes run by our luci infrastructure for CI
|
||||
and presubmit testing. This is currently a work in progress.
|
||||
|
||||
See [Kokoro](../ci/kokoro) configs for the actively used presubmit system.
|
||||
|
||||
Note: Luci applies config and recipes changes asynchronously. Do not submit changes to this
|
||||
directory in the same commit as changes to other crosvm source.
|
17
infra/README.recipes.md
Normal file
17
infra/README.recipes.md
Normal file
|
@ -0,0 +1,17 @@
|
|||
<!--- AUTOGENERATED BY `./recipes.py test train` -->
|
||||
# Repo documentation for [crosvm](https://chromium.googlesource.com/crosvm/crosvm.git)
|
||||
## Table of Contents
|
||||
|
||||
**[Recipes](#Recipes)**
|
||||
* [hello_world](#recipes-hello_world)
|
||||
## Recipes
|
||||
|
||||
### *recipes* / [hello\_world](/infra/recipes/hello_world.py)
|
||||
|
||||
[DEPS](/infra/recipes/hello_world.py#7): [recipe\_engine/step][recipe_engine/recipe_modules/step]
|
||||
|
||||
PYTHON_VERSION_COMPATIBILITY: PY2
|
||||
|
||||
— **def [RunSteps](/infra/recipes/hello_world.py#12)(api):**
|
||||
|
||||
[recipe_engine/recipe_modules/step]: https://chromium.googlesource.com/infra/luci/recipes-py.git/+/7b42800366a15f34b28e62f6bcb1cddcb2206db0/README.recipes.md#recipe_modules-step
|
|
@ -6,6 +6,25 @@
|
|||
|
||||
buckets {
|
||||
name: "ci"
|
||||
swarming {
|
||||
builders {
|
||||
name: "Example Builder"
|
||||
swarming_host: "chromium-swarm.appspot.com"
|
||||
exe {
|
||||
cipd_package: "infra/recipe_bundles/chromium.googlesource.com/crosvm/crosvm"
|
||||
cipd_version: "refs/heads/main"
|
||||
cmd: "luciexe"
|
||||
}
|
||||
properties:
|
||||
'{'
|
||||
' "recipe": "hello_world"'
|
||||
'}'
|
||||
experiments {
|
||||
key: "luci.recipes.use_python3"
|
||||
value: 100
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
buckets {
|
||||
name: "prod"
|
||||
|
|
20
infra/config/generated/luci-scheduler.cfg
Normal file
20
infra/config/generated/luci-scheduler.cfg
Normal file
|
@ -0,0 +1,20 @@
|
|||
# Auto-generated by lucicfg.
|
||||
# Do not modify manually.
|
||||
#
|
||||
# For the schema of this file, see ProjectConfig message:
|
||||
# https://luci-config.appspot.com/schemas/projects:luci-scheduler.cfg
|
||||
|
||||
job {
|
||||
id: "Example Builder"
|
||||
realm: "ci"
|
||||
schedule: "with 1m interval"
|
||||
acl_sets: "ci"
|
||||
buildbucket {
|
||||
server: "cr-buildbucket.appspot.com"
|
||||
bucket: "luci.crosvm.ci"
|
||||
builder: "Example Builder"
|
||||
}
|
||||
}
|
||||
acl_sets {
|
||||
name: "ci"
|
||||
}
|
|
@ -52,6 +52,7 @@ luci.realm(name = "pools/try")
|
|||
|
||||
# Global recipe defaults
|
||||
luci.recipe.defaults.cipd_version.set("refs/heads/main")
|
||||
luci.recipe.defaults.cipd_package.set("infra/recipe_bundles/chromium.googlesource.com/crosvm/crosvm")
|
||||
luci.recipe.defaults.use_python3.set(True)
|
||||
|
||||
# The try bucket will include builders which work on pre-commit or pre-review
|
||||
|
@ -64,3 +65,23 @@ luci.bucket(name = "ci")
|
|||
# The prod bucket will include builders which work on post-commit code and
|
||||
# generate executable artifacts used by other users or machines.
|
||||
luci.bucket(name = "prod")
|
||||
|
||||
# This sets the default CIPD ref to use in builds to get the right version of
|
||||
# recipes for the build.
|
||||
#
|
||||
# The recipe bundler sets CIPD refs equal in name to the git refs that it
|
||||
# processed the recipe code from.
|
||||
#
|
||||
# Note: This will cause all recipe commits to automatically deploy as soon
|
||||
# as the recipe bundler compiles them from your refs/heads/main branch.
|
||||
cipd_version = "refs/heads/main"
|
||||
|
||||
# Example builder to verify configuration
|
||||
luci.builder(
|
||||
name = "Example Builder",
|
||||
bucket = "ci",
|
||||
executable = luci.recipe(
|
||||
name = "hello_world",
|
||||
),
|
||||
schedule = "with 1m interval",
|
||||
)
|
||||
|
|
13
infra/config/recipes.cfg
Normal file
13
infra/config/recipes.cfg
Normal file
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"api_version": 2,
|
||||
"repo_name": "crosvm",
|
||||
"canonical_repo_url": "https://chromium.googlesource.com/crosvm/crosvm.git",
|
||||
"deps": {
|
||||
"recipe_engine": {
|
||||
"branch": "refs/heads/main",
|
||||
"revision": "7b42800366a15f34b28e62f6bcb1cddcb2206db0",
|
||||
"url": "https://chromium.googlesource.com/infra/luci/recipes-py.git"
|
||||
}
|
||||
},
|
||||
"recipes_path": "infra"
|
||||
}
|
208
infra/recipes.py
Executable file
208
infra/recipes.py
Executable file
|
@ -0,0 +1,208 @@
|
|||
#!/bin/sh
|
||||
# Copyright 2019 The LUCI Authors. All rights reserved.
|
||||
# Use of this source code is governed under the Apache License, Version 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
# We want to run python in unbuffered mode; however shebangs on linux grab the
|
||||
# entire rest of the shebang line as a single argument, leading to errors like:
|
||||
#
|
||||
# /usr/bin/env: 'python3 -u': No such file or directory
|
||||
#
|
||||
# This little shell hack is a triple-quoted noop in python, but in sh it
|
||||
# evaluates to re-exec'ing this script in unbuffered mode.
|
||||
# pylint: disable=pointless-string-statement
|
||||
''''exec python3 -u -- "$0" ${1+"$@"} # '''
|
||||
# vi: syntax=python
|
||||
"""Bootstrap script to clone and forward to the recipe engine tool.
|
||||
*******************
|
||||
** DO NOT MODIFY **
|
||||
*******************
|
||||
This is a copy of https://chromium.googlesource.com/infra/luci/recipes-py/+/main/recipes.py.
|
||||
To fix bugs, fix in the googlesource repo then run the autoroller.
|
||||
"""
|
||||
# pylint: disable=wrong-import-position
|
||||
import argparse
|
||||
import errno
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from collections import namedtuple
|
||||
from io import open # pylint: disable=redefined-builtin
|
||||
try:
|
||||
import urllib.parse as urlparse
|
||||
except ImportError:
|
||||
import urlparse
|
||||
# The dependency entry for the recipe_engine in the client repo's recipes.cfg
#
# url (str) - the url to the engine repo we want to use.
# revision (str) - the git revision for the engine to get.
# branch (str) - the branch to fetch for the engine as an absolute ref (e.g.
#   refs/heads/main)
#
# Constructed by parse() below from the normalized "recipe_engine" entry of
# the repo's recipes.cfg `deps` section.
EngineDep = namedtuple('EngineDep', 'url revision branch')
|
||||
class MalformedRecipesCfg(Exception):
  """Raised when a recipes.cfg file is missing required fields or malformed."""

  def __init__(self, msg, path):
    # Include the offending path in the message so the failure is actionable.
    detail = 'malformed recipes.cfg: {}: {!r}'.format(msg, path)
    super(MalformedRecipesCfg, self).__init__(detail)
|
||||
def parse(repo_root, recipes_cfg_path):
  """Parse is a lightweight recipes.cfg file parser.

  Args:
    repo_root (str) - native path to the root of the repo we're trying to run
      recipes for.
    recipes_cfg_path (str) - native path to the recipes.cfg file to process.

  Returns (as tuple):
    engine_dep (EngineDep|None): The recipe_engine dependency, or None, if the
      current repo IS the recipe_engine.
    recipes_path (str) - native path to where the recipes live inside of the
      current repo (i.e. the folder containing `recipes/` and/or
      `recipe_modules`)

  Raises:
    MalformedRecipesCfg - on an unknown api_version, a missing "url" in the
      recipe_engine dependency, or any other missing required key.
  """
  with open(recipes_cfg_path, 'r') as fh:
    pb = json.load(fh)

  try:
    # Only schema version 2 of recipes.cfg is understood by this bootstrap.
    if pb['api_version'] != 2:
      raise MalformedRecipesCfg('unknown version %d' % pb['api_version'],
                                recipes_cfg_path)

    # If we're running ./recipes.py from the recipe_engine repo itself, then
    # return None to signal that there's no EngineDep.
    # "project_id" is the legacy spelling of "repo_name".
    repo_name = pb.get('repo_name')
    if not repo_name:
      repo_name = pb['project_id']
    if repo_name == 'recipe_engine':
      return None, pb.get('recipes_path', '')

    engine = pb['deps']['recipe_engine']

    if 'url' not in engine:
      raise MalformedRecipesCfg(
          'Required field "url" in dependency "recipe_engine" not found',
          recipes_cfg_path)

    # Normalize optional fields so EngineDep(**engine) always has all three.
    engine.setdefault('revision', '')
    engine.setdefault('branch', 'refs/heads/main')
    recipes_path = pb.get('recipes_path', '')

    # TODO(iannucci): only support absolute refs
    if not engine['branch'].startswith('refs/'):
      engine['branch'] = 'refs/heads/' + engine['branch']

    # recipes_path in the config uses '/' separators; convert to native.
    recipes_path = os.path.join(repo_root,
                                recipes_path.replace('/', os.path.sep))
    return EngineDep(**engine), recipes_path
  except KeyError as ex:
    raise MalformedRecipesCfg(str(ex), recipes_cfg_path)
|
||||
# True on Windows (including cygwin); selects the .bat tool shims below.
IS_WIN = sys.platform.startswith(('win', 'cygwin'))

# On Windows the depot_tools launchers are .bat files.
_BAT = '.bat' if IS_WIN else ''
GIT = 'git' + _BAT
# RECIPES_USE_PY3=true opts in to the vpython3 interpreter wrapper.
VPYTHON = ('vpython' +
           ('3' if os.getenv('RECIPES_USE_PY3') == 'true' else '') +
           _BAT)
CIPD = 'cipd' + _BAT
# All of these must be on PATH for main() to proceed.
REQUIRED_BINARIES = {GIT, VPYTHON, CIPD}
|
||||
def _is_executable(path):
|
||||
return os.path.isfile(path) and os.access(path, os.X_OK)
|
||||
# TODO: Use shutil.which once we switch to Python3.
|
||||
# TODO: Use shutil.which once we switch to Python3.
def _is_on_path(basename):
  """Return True if `basename` names an executable in any $PATH directory."""
  search_dirs = os.environ['PATH'].split(os.pathsep)
  return any(
      _is_executable(os.path.join(directory, basename))
      for directory in search_dirs)
|
||||
def _subprocess_call(argv, **kwargs):
  # Thin wrapper over subprocess.call that logs the command first.
  # Returns the child's exit code (used on Windows, where main() cannot exec).
  logging.info('Running %r', argv)
  return subprocess.call(argv, **kwargs)
|
||||
def _git_check_call(argv, **kwargs):
  """Log and run a git subcommand; raises CalledProcessError on failure."""
  full_cmd = [GIT] + argv
  logging.info('Running %r', full_cmd)
  subprocess.check_call(full_cmd, **kwargs)
|
||||
def _git_output(argv, **kwargs):
  """Log and run a git subcommand, returning its captured stdout (bytes)."""
  full_cmd = [GIT] + argv
  logging.info('Running %r', full_cmd)
  return subprocess.check_output(full_cmd, **kwargs)
|
||||
def parse_args(argv):
  """Extract the subset of CLI arguments this bootstrap script cares about.

  Currently this consists of:
    * an override for the recipe engine in the form of `-O recipe_engine=/path`
    * the --package option.

  Returns (engine_override|None, package_path|None) as a tuple; all other
  arguments are left for the real recipe engine to parse.
  """
  prefix = 'recipe_engine='
  parser = argparse.ArgumentParser(add_help=False)
  parser.add_argument('-O', '--project-override', action='append')
  parser.add_argument('--package', type=os.path.abspath)
  known, _ = parser.parse_known_args(argv)
  # First -O entry targeting the recipe_engine wins, mirroring engine behavior.
  engine_override = next(
      (entry[len(prefix):]
       for entry in (known.project_override or ())
       if entry.startswith(prefix)),
      None)
  return engine_override, known.package
|
||||
def checkout_engine(engine_path, repo_root, recipes_cfg_path):
  """Ensure a checkout of the recipe engine exists and return its path.

  Args:
    engine_path (str|None) - explicit override path for the engine checkout,
      or None to resolve it from recipes.cfg.
    repo_root (str) - native path to the root of the client repo.
    recipes_cfg_path (str) - native path to the repo's recipes.cfg.

  Returns (str): native path to the recipe engine checkout to run from.
  """
  dep, recipes_path = parse(repo_root, recipes_cfg_path)
  if dep is None:
    # we're running from the engine repo already!
    return os.path.join(repo_root, recipes_path)

  url = dep.url

  # A file:// dependency URL is used directly as the engine path.
  if not engine_path and url.startswith('file://'):
    engine_path = urlparse.urlparse(url).path

  if not engine_path:
    revision = dep.revision
    branch = dep.branch

    # Ensure that we have the recipe engine cloned.
    engine_path = os.path.join(recipes_path, '.recipe_deps', 'recipe_engine')

    with open(os.devnull, 'w') as NUL:
      # Note: this logic mirrors the logic in recipe_engine/fetch.py
      _git_check_call(['init', engine_path], stdout=NUL)

      # Fetch only if the pinned revision isn't already present locally.
      try:
        _git_check_call(['rev-parse', '--verify',
                         '%s^{commit}' % revision],
                        cwd=engine_path,
                        stdout=NUL,
                        stderr=NUL)
      except subprocess.CalledProcessError:
        _git_check_call(['fetch', '--quiet', url, branch],
                        cwd=engine_path,
                        stdout=NUL)

    # Reset the working tree only when it differs from the pinned revision.
    try:
      _git_check_call(['diff', '--quiet', revision], cwd=engine_path)
    except subprocess.CalledProcessError:
      index_lock = os.path.join(engine_path, '.git', 'index.lock')
      try:
        os.remove(index_lock)
      except OSError as exc:
        if exc.errno != errno.ENOENT:
          # FIX: logging.warn is a deprecated alias (removed in Python 3.13);
          # use logging.warning.
          logging.warning('failed to remove %r, reset will fail: %s',
                          index_lock, exc)
      _git_check_call(['reset', '-q', '--hard', revision], cwd=engine_path)

    # If the engine has refactored/moved modules we need to clean all .pyc files
    # or things will get squirrely.
    _git_check_call(['clean', '-qxf'], cwd=engine_path)

  return engine_path
|
||||
def main():
  """Bootstrap entry point: locate the engine and hand off to it.

  Returns an error string if a required binary is missing (sys.exit prints
  it and exits non-zero). On POSIX this function does not return on success,
  because os.execvp replaces the current process with the engine.
  """
  for required_binary in REQUIRED_BINARIES:
    if not _is_on_path(required_binary):
      return 'Required binary is not found on PATH: %s' % required_binary

  if '--verbose' in sys.argv:
    logging.getLogger().setLevel(logging.INFO)

  args = sys.argv[1:]
  engine_override, recipes_cfg_path = parse_args(args)

  if recipes_cfg_path:
    # calculate repo_root from recipes_cfg_path
    # (recipes.cfg lives at <repo_root>/infra/config/recipes.cfg)
    repo_root = os.path.dirname(
        os.path.dirname(os.path.dirname(recipes_cfg_path)))
  else:
    # find repo_root with git and calculate recipes_cfg_path
    repo_root = (
        _git_output(['rev-parse', '--show-toplevel'],
                    cwd=os.path.abspath(os.path.dirname(__file__))).strip())
    # _git_output returns bytes; decode to a native str path.
    repo_root = os.path.abspath(repo_root).decode()
    recipes_cfg_path = os.path.join(repo_root, 'infra', 'config', 'recipes.cfg')
    args = ['--package', recipes_cfg_path] + args
  engine_path = checkout_engine(engine_override, repo_root, recipes_cfg_path)

  # Re-invoke the real engine under vpython in unbuffered mode, forwarding
  # the original arguments.
  argv = (
      [VPYTHON, '-u',
       os.path.join(engine_path, 'recipe_engine', 'main.py')] + args)

  if IS_WIN:
    # No real 'exec' on windows; set these signals to ignore so that they
    # propagate to our children but we still wait for the child process to quit.
    import signal
    signal.signal(signal.SIGBREAK, signal.SIG_IGN)
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    signal.signal(signal.SIGTERM, signal.SIG_IGN)
    return _subprocess_call(argv)
  else:
    os.execvp(argv[0], argv)
|
9
infra/recipes/hello_world.expected/basic.json
Normal file
9
infra/recipes/hello_world.expected/basic.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
[
|
||||
{
|
||||
"cmd": [],
|
||||
"name": "Hello world"
|
||||
},
|
||||
{
|
||||
"name": "$result"
|
||||
}
|
||||
]
|
20
infra/recipes/hello_world.py
Normal file
20
infra/recipes/hello_world.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
# Enumerates the recipe modules that this recipe uses.
#
# "recipe_engine" is the "repo_name" for the recipes-py repo, and "step"
# is the name of the "step" recipe module within that repo. The
# "recipe_engine/step" module will be the most frequently-used module in your
# recipes as it allows you to run executables within your build.
#
# The recipe engine injects these as `api.<module>` on the object passed to
# RunSteps/GenTests below.
DEPS = [
    "recipe_engine/step",
]
|
||||
|
||||
|
||||
def RunSteps(api):
    """Recipe entry point: runs a single no-op step."""
    # Creates an 'empty' (i.e. no-op) step in the UI with the name "Hello world".
    api.step.empty("Hello world")
|
||||
|
||||
|
||||
def GenTests(api):
    """Yields simulation test cases for this recipe."""
    # Tells the recipe engine to generate an expectation file (JSON simulation
    # output) for this recipe when it is run without any input properties.
    yield api.test("basic")
|
13
tools/fmt
13
tools/fmt
|
@ -34,6 +34,12 @@ black = cmd("black", "--line-length 100")
|
|||
# How many files to check at once in each thread.
|
||||
BATCH_SIZE = 8
|
||||
|
||||
# Files not under our control or auto-generated.
|
||||
IGNORE = [
|
||||
"infra/README.recipes.md",
|
||||
"infra/recipes.py",
|
||||
]
|
||||
|
||||
|
||||
def main(check: bool = False):
|
||||
chdir(CROSVM_ROOT)
|
||||
|
@ -42,10 +48,13 @@ def main(check: bool = False):
|
|||
print(f"{'Checking format' if check else 'Formatting'}: Rust, Markdown")
|
||||
parallel(
|
||||
*rustfmt(check_arg).foreach(find_source_files("rs"), batch_size=BATCH_SIZE),
|
||||
*mdformat("--wrap 100", check_arg).foreach(find_source_files("md"), batch_size=BATCH_SIZE),
|
||||
*mdformat("--wrap 100", check_arg).foreach(
|
||||
find_source_files("md", IGNORE),
|
||||
batch_size=BATCH_SIZE,
|
||||
),
|
||||
*black(check_arg).foreach(
|
||||
(
|
||||
*find_source_files("py"),
|
||||
*find_source_files("py", IGNORE),
|
||||
*find_scripts(Path("tools"), "/usr/bin/env python3"),
|
||||
),
|
||||
batch_size=BATCH_SIZE,
|
||||
|
|
|
@ -526,12 +526,14 @@ def __add_verbose_args(parser: argparse.ArgumentParser):
|
|||
)
|
||||
|
||||
|
||||
def find_source_files(extension: str):
|
||||
def find_source_files(extension: str, ignore: list[str] = []):
    """Yields repo source files with the given extension, skipping vendored
    and build-output paths.

    Args:
        extension: File extension without the dot (e.g. "py", "md", "rs").
        ignore: Paths (as rendered by str(Path)) to skip. Never mutated, so
            the shared default list is safe; treated as read-only.

    Yields:
        pathlib.Path entries relative to the current working directory.
    """
    # Copy into a set once: O(1) membership instead of an O(n) list scan
    # per file, and insulates us from the shared mutable default argument.
    ignored = set(ignore)
    for path in Path(".").glob(f"**/*.{extension}"):
        # Vendored code not under our control.
        if path.is_relative_to("third_party"):
            continue
        # Skip anything inside a "target" directory (build output -- presumably
        # Cargo's; TODO confirm against the repo layout).
        if "target" in path.parts:
            continue
        if str(path) in ignored:
            continue
        yield path
|
||||
|
||||
|
||||
|
|
Loading…
Reference in a new issue