Skip to content

Commit

Permalink
WIP: Adapt for multi-arch layout
Browse files Browse the repository at this point in the history
Still a lot of prep patches to split out and some more testing to do,
but it works pretty well so far.

```
$ find builds
builds
builds/builds.json
builds/30.1
builds/30.1/x86_64
builds/30.1/x86_64/coreos-assembler-config.tar.gz
builds/30.1/x86_64/coreos-assembler-config-git.json
builds/30.1/x86_64/fedora-coreos-30.1-qemu.qcow2
builds/30.1/x86_64/ostree-commit-object
builds/30.1/x86_64/meta.json
builds/30.1/x86_64/manifest-lock.generated.json
builds/30.1/x86_64/commitmeta.json
builds/30.1/x86_64/ostree-commit.tar
builds/30.1/x86_64/fedora-coreos-30.1-installer-kernel
builds/30.1/x86_64/fedora-coreos-30.1-installer-initramfs.img
builds/30.1/x86_64/fedora-coreos-30.1-installer.iso
builds/30.1/x86_64/fedora-coreos-30.1-metal.raw
builds/30.1/x86_64/fedora-coreos-30.1-openstack.qcow2
builds/latest

$ cat builds/builds.json
{
    "schema-version": "1.0.0",
    "builds": [
        {
            "id": "30.1",
            "archs": [
                "x86_64"
            ]
        }
    ],
    "timestamp": "2019-06-28T20:50:54Z"
}
```

For more context, see:
#463 (comment)

The key thing to note here is that this only affects new workdirs
currently. I'll also be writing a separate migration tool so that we can
adapt S3 buckets to the new layout.

Adapting to this will definitely be a bit painful, but I think it's
worth it overall. Making this native to cosa means that we don't need a
translation layer between our S3 bucket and cosa, we can parallelize
things more easily in the future, and e.g. we can transparently use
"bulk" commands like `cosa buildupload` and `cosa compress`.
  • Loading branch information
jlebon committed Jun 28, 2019
1 parent 55102a2 commit 5071a55
Show file tree
Hide file tree
Showing 18 changed files with 383 additions and 213 deletions.
24 changes: 13 additions & 11 deletions src/cmd-build
Original file line number Diff line number Diff line change
Expand Up @@ -80,10 +80,9 @@ prepare_build
ostree --version
rpm-ostree --version

previous_build=
if [ -L "${workdir}"/builds/latest ]; then
previous_build=$(readlink "${workdir}"/builds/latest)
previous_builddir="${workdir}/builds/${previous_build}"
previous_build=$(get_latest_build)
if [ -n "${previous_build}" ]; then
previous_builddir=$(get_build_dir "${previous_build}")
echo "Previous build: ${previous_build}"
fi

Expand Down Expand Up @@ -145,7 +144,8 @@ commitmeta_input_json=${PWD}/tmp/commit-metadata-input.json
cat > "${commitmeta_input_json}" <<EOF
{
"coreos-assembler.config-gitrev": "${config_gitrev}",
"coreos-assembler.config-dirty": "${config_dirty}"
"coreos-assembler.config-dirty": "${config_dirty}",
"coreos-assembler.basearch": "${basearch}"
}
EOF

Expand Down Expand Up @@ -360,18 +360,20 @@ saved_build_tmpdir="${workdir}/tmp/last-build-tmp"
rm -rf "${saved_build_tmpdir}"
mv -T tmp "${saved_build_tmpdir}"
ostree prune --repo="${tmprepo}" --refs-only
# Back to the toplevel build directory, so we can rename this one
cd "${workdir}"/builds
# Back to the toplevel work directory, so we can rename this one
cd "${workdir}"
# We create a .build-commit file to note that we're in the
# middle of a "commit". This may be useful in the future
# for having things be transactional. If for example we
# were interrupted between the rename() and linkat() below,
# things would be inconsistent and future builds would fail
# on the `mv`.
touch .build-commit
mv -T "${tmp_builddir}" "${buildid}"
touch builds/.build-commit
builddir=$(get_build_dir "${buildid}")
mkdir -p "${builddir}"
mv -T "${tmp_builddir}" "${builddir}"
# Replace the latest link
ln -Tsfr "${buildid}" latest
ln -Tsf "${buildid}" builds/latest
# Update builds.json
# the variables passed to `prune_builds` end up single quoted and
# python treats them as literals, so we workaround this by duplicating
Expand All @@ -381,4 +383,4 @@ if [ "${SKIP_PRUNE}" == 1 ]; then
else
"${dn}"/prune_builds --workdir "${workdir}"
fi
rm .build-commit
rm builds/.build-commit
6 changes: 3 additions & 3 deletions src/cmd-buildextend-aws
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import os,sys,json,yaml,shutil,argparse,subprocess,re,collections
import tempfile,hashlib,gzip

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from cmdlib import run_verbose, write_json, sha256sum_file
from cmdlib import run_verbose, write_json, sha256sum_file, Builds

# Parse args and dispatch
parser = argparse.ArgumentParser()
Expand All @@ -23,8 +23,8 @@ parser.add_argument("--grant-user", help="Grant user launch permission",
nargs="*", default=[])
args = parser.parse_args()


builddir = os.path.join('builds', args.build)
builds = Builds()
builddir = builds.get_build_dir(args.build)
buildmeta_path = os.path.join(builddir, 'meta.json')
with open(buildmeta_path) as f:
buildmeta = json.load(f)
Expand Down
6 changes: 4 additions & 2 deletions src/cmd-buildextend-gcp
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ sys.path.insert(0, cosa_dir)
from cmdlib import (
run_verbose,
sha256sum_file,
write_json)
write_json,
Builds)

# Parse args and dispatch
parser = argparse.ArgumentParser()
Expand Down Expand Up @@ -49,7 +50,8 @@ if args.project is None:
raise Exception(arg_exp_str.format("project", "GCP_PROJECT"))

# Identify the builds
builddir = os.path.join('builds', args.build)
builds = Builds()
builddir = builds.get_build_dir(args.build)
buildmeta_path = os.path.join(builddir, 'meta.json')
with open(buildmeta_path) as f:
buildmeta = json.load(f)
Expand Down
10 changes: 5 additions & 5 deletions src/cmd-buildextend-installer
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import tempfile

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from cmdlib import run_verbose, write_json, sha256sum_file
from cmdlib import import_ostree_commit
from cmdlib import import_ostree_commit, Builds

# Parse args and dispatch
parser = argparse.ArgumentParser()
Expand All @@ -23,16 +23,16 @@ parser.add_argument("--force", action='store_true', default=False,
help="Overwrite previously generated installer")
args = parser.parse_args()

builds = Builds()

# default to latest build if not specified
if not args.build:
with open('builds/builds.json') as f:
j = json.load(f)
args.build = j['builds'][0]
args.build = builds.get_latest()

print(f"Targeting build: {args.build}")

workdir = os.path.abspath(os.getcwd())
builddir = os.path.join(workdir, 'builds', args.build)
builddir = builds.get_build_dir(args.build)
buildmeta_path = os.path.join(builddir, 'meta.json')
with open(buildmeta_path) as f:
buildmeta = json.load(f)
Expand Down
9 changes: 4 additions & 5 deletions src/cmd-buildextend-metal
Original file line number Diff line number Diff line change
Expand Up @@ -68,14 +68,13 @@ export LIBGUESTFS_BACKEND=direct
prepare_build

if [ -z "${build}" ]; then
if [ -L "${workdir}"/builds/latest ]; then
build=$(readlink "${workdir}"/builds/latest)
else
build=$(get_latest_build)
if [ -z "${build}" ]; then
fatal "No build found."
fi
fi

builddir="${workdir}/builds/${build}"
builddir=$(get_build_dir "$build")
if [ ! -d "${builddir}" ]; then
fatal "Build dir ${builddir} does not exist."
fi
Expand Down Expand Up @@ -138,7 +137,7 @@ else
# 384M is the non-ostree partitions
size="$(( size + 384 ))M"
echo "Disk size estimated to $size"
kargs="$(python3 -c 'import sys, yaml; args = yaml.load(sys.stdin)["extra-kargs"]; print(" ".join(args))' < "$configdir/image.yaml")"
kargs="$(python3 -c 'import sys, yaml; args = yaml.safe_load(sys.stdin)["extra-kargs"]; print(" ".join(args))' < "$configdir/image.yaml")"
kargs="$kargs console=tty0 console=${VM_TERMINAL},115200n8 ignition.platform.id=metal"

qemu-img create -f qcow2 "${path}.qcow2" "$size"
Expand Down
10 changes: 5 additions & 5 deletions src/cmd-buildextend-openstack
Original file line number Diff line number Diff line change
Expand Up @@ -11,22 +11,22 @@ import shutil
import argparse

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from cmdlib import run_verbose, write_json, sha256sum_file
from cmdlib import run_verbose, write_json, sha256sum_file, Builds

# Parse args and dispatch
parser = argparse.ArgumentParser()
parser.add_argument("--build", help="Build ID")
args = parser.parse_args()

builds = Builds()

# default to latest build if not specified
if not args.build:
with open('builds/builds.json') as f:
j = json.load(f)
args.build = j['builds'][0]
args.build = builds.get_latest()

print(f"Targeting build: {args.build}")

builddir = os.path.join('builds', args.build)
builddir = builds.get_build_dir(args.build)
buildmeta_path = os.path.join(builddir, 'meta.json')
with open(buildmeta_path) as f:
buildmeta = json.load(f)
Expand Down
9 changes: 5 additions & 4 deletions src/cmd-buildextend-vmware
Original file line number Diff line number Diff line change
Expand Up @@ -11,19 +11,20 @@ import argparse
import tarfile

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from cmdlib import run_verbose, load_json, write_json, sha256sum_file
from cmdlib import run_verbose, load_json, write_json, sha256sum_file, Builds

parser = argparse.ArgumentParser()
parser.add_argument("--build", help="Build ID",
required=False)
args = parser.parse_args()

builds = Builds()

# default to latest build if not specified
if not args.build:
j = load_json('builds/builds.json')
args.build = j['builds'][0]
args.build = builds.get_latest()

builddir = os.path.join('builds', args.build)
builddir = builds.get_build_dir(args.build)
buildmeta_path = os.path.join(builddir, 'meta.json')
buildmeta = load_json(buildmeta_path)
base_name = buildmeta['name']
Expand Down
31 changes: 23 additions & 8 deletions src/cmd-buildprep
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import boto3
import shutil

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from cmdlib import load_json, rm_allow_noent # noqa: E402
from cmdlib import load_json, rm_allow_noent, Builds # noqa: E402


def main():
Expand All @@ -29,26 +29,35 @@ def main():
else:
raise Exception("Invalid scheme: only file://, s3://, and http(s):// supported")

builds = []
builds = None
if fetcher.exists('builds.json'):
builds = fetcher.fetch_json('builds.json')['builds']
fetcher.fetch_json('builds.json')['builds']
builds = Builds()

if len(builds) == 0:
if not builds or builds.is_empty():
print("Remote has no builds!")
return

buildid = builds[0]
os.makedirs(f'builds/{buildid}', exist_ok=True)
# NB: we only buildprep for the arch we're on

buildid = builds.get_latest()
builddir = builds.get_build_dir(buildid)
os.makedirs(builddir, exist_ok=True)

# trim out the leading builds/
assert builddir.startswith("builds/")
builddir = builddir[len("builds/"):]

for f in ['meta.json', 'ostree-commit-object']:
fetcher.fetch(f'{buildid}/{f}')
fetcher.fetch(f'{builddir}/{f}')

# and finally the symlink
rm_allow_noent('builds/latest')
os.symlink(buildid, 'builds/latest')

# also nuke any local matching OSTree ref, since we want to build on
# top of this new one
buildmeta = load_json('builds/latest/meta.json')
buildmeta = load_json(f'builds/{builddir}/meta.json')
if 'ref' in buildmeta and os.path.isdir('tmp/repo'):
subprocess.check_call(['ostree', 'refs', '--repo', 'tmp/repo',
'--delete', buildmeta['ref']],
Expand Down Expand Up @@ -79,6 +88,12 @@ class Fetcher(object):
self.fetch_impl(url, dest)
return dest

def fetch_impl(self, url, dest):
    """Download *url* to the local path *dest*.

    Abstract hook: concrete Fetcher subclasses must override this.
    """
    raise NotImplementedError

def exists_impl(self, url):
    """Return whether *url* exists on the remote.

    Abstract hook: concrete Fetcher subclasses must override this.
    """
    raise NotImplementedError

def fetch_json(self, path):
    """Fetch *path* from the remote and return its contents parsed as JSON."""
    return load_json(self.fetch(path))

Expand Down
35 changes: 21 additions & 14 deletions src/cmd-buildupload
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import sys
import tempfile

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from cmdlib import load_json # noqa: E402
from cmdlib import load_json, Builds # noqa: E402


def main():
Expand All @@ -22,6 +22,8 @@ def main():
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--build", help="Build ID", default='latest')
parser.add_argument("--dry-run", help="Just print and exit",
action='store_true')
parser.add_argument("--freshen", help="Only push builds.json",
action='store_true')

Expand All @@ -42,18 +44,22 @@ def parse_args():

def cmd_upload_s3(args):
    """Upload a build (or just builds.json with --freshen) to S3.

    Unless --freshen was given, resolves 'latest' to a concrete build ID
    and uploads the build directory: once for a legacy (single-arch)
    layout, or once per architecture for the multi-arch layout, using
    '{build}/{arch}' as the remote prefix.  In all cases, finishes by
    pushing builds.json with a short cache lifetime so consumers pick up
    the new build quickly.
    """
    if not args.freshen:
        builds = Builds()
        if args.build == 'latest':
            args.build = builds.get_latest()
        print(f"Targeting build: {args.build}")
        if builds.is_legacy():
            s3_upload_build(args, builds.get_build_dir(args.build), args.build)
        else:
            for arch in builds.get_build_archs(args.build):
                s3_upload_build(args, builds.get_build_dir(args.build, arch),
                                f'{args.build}/{arch}')
    # Short max-age: builds.json is the index everyone polls, so it must
    # not be cached for long after a new build lands.
    s3_cp(args, 'builds/builds.json', 'builds.json',
          '--cache-control=max-age=60')


def s3_upload_build(args):
builddir = f'builds/{args.build}'
def s3_upload_build(args, builddir, dest):
build = load_json(f'{builddir}/meta.json')
buildid = build['buildid']

print(f"Targeting build: {buildid}")
print(f" OSTree commit: {build['ostree-commit']}")

# Upload images with special handling for gzipped data.
uploaded = set()
Expand All @@ -69,35 +75,36 @@ def s3_upload_build(args):
args.enable_gz_peel):
nogz = bn[:-3]
img['path'] = nogz
s3_cp(args, path, f'{buildid}/{nogz}',
s3_cp(args, path, f'{dest}/{nogz}',
'--content-encoding=gzip',
f'--content-disposition=inline; filename={nogz}')
else:
s3_cp(args, path, f'{buildid}/{bn}')
s3_cp(args, path, f'{dest}/{bn}')
uploaded.add(bn)

for f in os.listdir(builddir):
# we do meta.json right after
if f in uploaded or f == 'meta.json':
continue
path = os.path.join(builddir, f)
s3_cp(args, path, f'{buildid}/{f}')
s3_cp(args, path, f'{dest}/{f}')

# Now upload a modified version of the meta.json which has the fixed
# filenames without the .gz suffixes. We don't want to modify the local
# build dir.
with tempfile.NamedTemporaryFile('w') as f:
json.dump(build, f, indent=4)
f.flush()
s3_cp(args, f.name, f'{buildid}/meta.json')
s3_cp(args, f.name, f'{dest}/meta.json')


def s3_cp(args, src, dest, *s3_args):
    """Copy local file *src* to s3://args.url/*dest* via the AWS CLI.

    Extra *s3_args* are passed straight through to `aws s3 cp`
    (e.g. --content-encoding, --content-disposition).  Always prints the
    destination; honors args.dry_run by skipping the actual upload.
    """
    acl = f'--acl={args.acl}'
    dest = f's3://{args.url}/{dest}'
    print(f"Uploading: {dest}")
    if not args.dry_run:
        # Suppress the CLI's progress chatter; a failed upload still
        # raises CalledProcessError.
        subprocess.check_call(['aws', 's3', 'cp', acl, src, dest, *s3_args],
                              stdout=subprocess.DEVNULL)


if __name__ == '__main__':
Expand Down
3 changes: 1 addition & 2 deletions src/cmd-clean
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,5 @@ prepare_build

# But go back to the toplevel
cd "${workdir:?}"
# Note we don't prune the cache.qcow2 or the objects
# in the repo. If you want that, just rm -rf them.
# Note we don't prune the cache. If you want that, just rm -rf them.
rm -rf builds/* tmp/*
Loading

0 comments on commit 5071a55

Please sign in to comment.