# Release workflow (run #70)
name: Release
on:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: false
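# Only one release run per workflow/ref at a time; with cancel-in-progress set to
# false, a newly dispatched run is queued behind the current one rather than
# cancelling it.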
env:
COMMIT_NAME: Monkeynator
COMMIT_EMAIL: monkeynator@enix.io
VERSION_KIND: v0.20.0
VERSION_GOLANGCILINT: latest
VERSION_GOLANG: '1.21.5'
VERSION_CHARTRELEASER: '1.6.0'
VERSION_OCI_BUSYBOX: '1.36.1-uclibc'
VERSION_OCI_ALPINE: '3.19.0'
jobs:
semver:
name: Semantic Version
runs-on: ubuntu-22.04
outputs:
last: ${{ steps.dry-run.outputs.last_release_version }}
published: ${{ steps.dry-run.outputs.new_release_published }}
channel: ${{ steps.dry-run.outputs.new_release_channel }}
version: ${{ steps.dry-run.outputs.new_release_version }}
major: ${{ steps.dry-run.outputs.new_release_major_version }}
minor: ${{ steps.dry-run.outputs.new_release_minor_version }}
patch: ${{ steps.dry-run.outputs.new_release_patch_version }}
notes: ${{ steps.dry-run.outputs.new_release_notes }}
steps:
- name: Checkout Repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Run semantic-release (dry-run)
id: dry-run
uses: cycjimmy/semantic-release-action@v4
with:
dry_run: true
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_GITHUB_TOKEN }}
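# The dry run computes the next semantic version from the commit history without
# tagging or publishing anything; downstream jobs consume these outputs, and the
# draft-release job later checks its real run against them for consistency.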
- name: Inspect semantic-release (dry-run) outcome
shell: python
env:
PYTHONPATH: ${{ github.workspace }}/.github
SR_LAST: ${{ steps.dry-run.outputs.last_release_version }}
SR_PUBLISHED: ${{ steps.dry-run.outputs.new_release_published }}
SR_CHANNEL: ${{ steps.dry-run.outputs.new_release_channel }}
SR_VERSION: ${{ steps.dry-run.outputs.new_release_version }}
SR_MAJOR: ${{ steps.dry-run.outputs.new_release_major_version }}
SR_MINOR: ${{ steps.dry-run.outputs.new_release_minor_version }}
SR_PATCH: ${{ steps.dry-run.outputs.new_release_patch_version }}
SR_NOTES: ${{ steps.dry-run.outputs.new_release_notes }}
run: |
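# 'lib' is the repository-local helper module on PYTHONPATH (.github/), assumed
# to provide the header/info/action and assert_* helpers used in these scripts.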
from lib import *
import os
header('semantic-release job outputs')
info('last = {}'.format(os.environ['SR_LAST']))
info('published = {}'.format(os.environ['SR_PUBLISHED']))
info('channel = {}'.format(os.environ['SR_CHANNEL']))
info('version = {}'.format(os.environ['SR_VERSION']))
info('major = {}'.format(os.environ['SR_MAJOR']))
info('minor = {}'.format(os.environ['SR_MINOR']))
info('patch = {}'.format(os.environ['SR_PATCH']))
info('notes ⏎\n{}'.format(os.environ['SR_NOTES']))
header('sanity checks')
action('should be published')
assert_equality((
(os.environ['SR_PUBLISHED'], 'true'),
))
tests:
name: Tests
runs-on: ubuntu-22.04
steps:
- name: Create KinD cluster
uses: engineerd/setup-kind@v0.5.0
with:
version: ${{ env.VERSION_KIND }}
- name: Checkout Repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: ${{ env.VERSION_GOLANG }}
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.6.1
- name: Run golangci-lint
uses: golangci/golangci-lint-action@v3
with:
version: ${{ env.VERSION_GOLANGCILINT }}
args: --timeout 3m --verbose --out-${NO_FUTURE}format colored-line-number
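# ${NO_FUTURE} is presumably never set, so the argument ultimately expands to
# --out-format; splitting the flag this way looks like a workaround to keep the
# action from special-casing an explicit --out-format argument.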
- name: Run unit tests
run: |
set -euo pipefail
kubectl cluster-info
go test -v -failfast -timeout 1m -coverprofile=coverage.cov ./internal
go tool cover -html=coverage.cov -o coverage.html
- name: Run hadolint
id: hadolint
uses: hadolint/hadolint-action@v3.1.0
- name: Run chart-testing (lint)
run: |
set -euo pipefail
ct lint \
--charts deploy/charts/x509-certificate-exporter \
--validate-maintainers=false --check-version-increment=false
# FIXME
#- name: Run chart-testing (install)
# run: |
# set -euo pipefail
# ct install --chart-dirs deploy/charts
build:
name: Build
needs:
- semver
- tests
runs-on: ubuntu-22.04
strategy:
matrix:
#goos: [linux]
#goarch: [amd64, arm64, riscv64]
goos:
[linux, darwin, freebsd, illumos, netbsd, openbsd, solaris, windows]
goarch: [amd64, arm64, riscv64, '386']
exclude:
- { goos: darwin, goarch: riscv64 }
- { goos: darwin, goarch: '386' }
- { goos: illumos, goarch: arm64 }
- { goos: illumos, goarch: riscv64 }
- { goos: illumos, goarch: '386' }
- { goos: netbsd, goarch: riscv64 }
- { goos: openbsd, goarch: riscv64 }
- { goos: solaris, goarch: arm64 }
- { goos: solaris, goarch: riscv64 }
- { goos: solaris, goarch: '386' }
- { goos: windows, goarch: riscv64 }
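# The matrix is the GOOS x GOARCH cross-product minus the combinations that the
# pinned Go toolchain does not support (e.g. darwin/riscv64, windows/riscv64).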
steps:
- name: Checkout Repository
uses: actions/checkout@v4
with:
path: repository
persist-credentials: false
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: ${{ env.VERSION_GOLANG }}
- name: Build binaries and prepare assets
shell: python
env:
PYTHONPATH: ${{ github.workspace }}/repository/.github
WORKSPACE: ${{ github.workspace }}
GOOS: ${{ matrix.goos }}
GOARCH: ${{ matrix.goarch }}
VERSION: ${{ needs.semver.outputs.version }}
REVISION: ${{ github.sha }}
run: |
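# This step cross-compiles the exporter for the current matrix entry, stamps
# version metadata in via -ldflags, and packages the binary as a .zip (Windows)
# or .tar.gz (other platforms) plus a .sha256 checksum file under assets/.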
from lib import *
from datetime import datetime
from zipfile import ZipFile, ZIP_DEFLATED
from hashlib import sha256
import os, tarfile
header('prepare build environment')
build_output_dir = os.path.join(os.environ['WORKSPACE'], 'build')
release_assets_dir = os.path.join(os.environ['WORKSPACE'], 'assets')
os.mkdir(build_output_dir)
os.mkdir(release_assets_dir)
header('set build parameters')
if os.environ['GOOS'] == 'windows':
binary_extension = '.exe'
asset_extension = '.zip'
zip_archive = True
else:
binary_extension = ''
asset_extension = '.tar.gz'
zip_archive = False
base_name = 'x509-certificate-exporter'
binary_name = '{}{}'.format(base_name, binary_extension)
asset_name = '{}-{}-{}{}'.format(base_name, os.environ['GOOS'], os.environ['GOARCH'], asset_extension)
sum_name = '{}.sha256'.format(asset_name)
build_time = datetime.utcnow().isoformat(timespec='seconds')
ldflags = [
'-X github.com/enix/x509-certificate-exporter/v3/internal.Version={}'.format(os.environ['VERSION']),
'-X github.com/enix/x509-certificate-exporter/v3/internal.Revision={}'.format(os.environ['REVISION']),
'-X github.com/enix/x509-certificate-exporter/v3/internal.BuildDateTime={}'.format(build_time),
]
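# -X sets these package-level string variables at link time; the import paths
# must match the module's internal package exactly for the values to land.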
info('build output directory = {}'.format(build_output_dir))
info('release assets directory = {}'.format(release_assets_dir))
info('output binary filename = {}'.format(binary_name))
info('release asset filename = {}'.format(asset_name))
info('release asset sum file = {}'.format(sum_name))
info('build time = {}'.format(build_time))
info('golang version = {}'.format(run('go', 'version', capture_output=True).strip()))
info('ldflags')
for ldflag in ldflags:
info(' {}'.format(ldflag))
header('build project')
binary_output_path = os.path.join(build_output_dir, binary_name)
run('go', 'build',
'-tags', 'netgo,osusergo',
'-ldflags', ' '.join(ldflags),
'-o', binary_output_path,
'./cmd/x509-certificate-exporter',
cwd=os.path.join(os.environ['WORKSPACE'], 'repository'),
env=os.environ)
header('generate release asset')
action('create archive')
asset_path = os.path.join(release_assets_dir, asset_name)
if zip_archive:
with ZipFile(asset_path, 'x', ZIP_DEFLATED) as zipfile:
zipfile.write(binary_output_path, arcname=binary_name)
else:
def reset(tarinfo):
tarinfo.uid = tarinfo.gid = 1000
tarinfo.uname = tarinfo.gname = 'user'
return tarinfo
with tarfile.open(asset_path, 'x:gz') as tar:
tar.add(binary_output_path, arcname=binary_name, filter=reset)
action('compute archive checksums')
hex_sum = sha256(open(asset_path, 'rb').read()).hexdigest()
info('SHA-256 = {}'.format(hex_sum))
assert_length((
(hex_sum, 64),
))
sum_path = os.path.join(release_assets_dir, sum_name)
open(sum_path, 'x').write(hex_sum)
- name: Upload assets to artifact
uses: actions/upload-artifact@v3
with:
name: binaries
path: assets/*
retention-days: 1
draft-release:
name: Draft the release
needs:
- semver
- build
runs-on: ubuntu-22.04
outputs:
last: ${{ steps.release.outputs.last_release_version }}
published: ${{ steps.release.outputs.new_release_published }}
channel: ${{ steps.release.outputs.new_release_channel }}
version: ${{ steps.release.outputs.new_release_version }}
major: ${{ steps.release.outputs.new_release_major_version }}
minor: ${{ steps.release.outputs.new_release_minor_version }}
patch: ${{ steps.release.outputs.new_release_patch_version }}
notes: ${{ steps.release.outputs.new_release_notes }}
prerelease: ${{ steps.inspect.outputs.prerelease }}
steps:
- name: Checkout Repository
uses: actions/checkout@v4
with:
path: repository
persist-credentials: false
- name: Retrieve assets from artifact
uses: actions/download-artifact@v3
with:
name: binaries
path: assets
- name: Run semantic-release
id: release
uses: cycjimmy/semantic-release-action@v4
with:
working_directory: repository
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_GITHUB_TOKEN }}
GIT_COMMITTER_NAME: ${{ env.COMMIT_NAME }}
GIT_COMMITTER_EMAIL: ${{ env.COMMIT_EMAIL }}
GIT_AUTHOR_NAME: ${{ env.COMMIT_NAME }}
GIT_AUTHOR_EMAIL: ${{ env.COMMIT_EMAIL }}
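# Unlike the dry run in the semver job, this invocation creates the Git tag and
# the GitHub release; judging by the job name it is presumably configured, via
# the repository's semantic-release config, to publish it as a draft with the
# downloaded binary assets attached.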
- name: Inspect semantic-release outcome
id: inspect
shell: python
env:
PYTHONPATH: ${{ github.workspace }}/repository/.github
SRDRY_CHANNEL: ${{ needs.semver.outputs.channel }}
SRDRY_VERSION: ${{ needs.semver.outputs.version }}
SR_LAST: ${{ steps.release.outputs.last_release_version }}
SR_PUBLISHED: ${{ steps.release.outputs.new_release_published }}
SR_CHANNEL: ${{ steps.release.outputs.new_release_channel }}
SR_VERSION: ${{ steps.release.outputs.new_release_version }}
SR_MAJOR: ${{ steps.release.outputs.new_release_major_version }}
SR_MINOR: ${{ steps.release.outputs.new_release_minor_version }}
SR_PATCH: ${{ steps.release.outputs.new_release_patch_version }}
SR_NOTES: ${{ steps.release.outputs.new_release_notes }}
run: |
from lib import *
import os
header('semantic-release job outputs')
info('last = {}'.format(os.environ['SR_LAST']))
info('published = {}'.format(os.environ['SR_PUBLISHED']))
info('channel = {}'.format(os.environ['SR_CHANNEL']))
info('version = {}'.format(os.environ['SR_VERSION']))
info('major = {}'.format(os.environ['SR_MAJOR']))
info('minor = {}'.format(os.environ['SR_MINOR']))
info('patch = {}'.format(os.environ['SR_PATCH']))
info('notes ⏎\n{}'.format(os.environ['SR_NOTES']))
header('sanity checks')
action('should be published')
assert_equality((
(os.environ['SR_PUBLISHED'], 'true'),
))
action('consistency with the dry-run')
assert_equality((
(os.environ['SR_CHANNEL'], os.environ['SRDRY_CHANNEL']),
(os.environ['SR_VERSION'], os.environ['SRDRY_VERSION']),
))
header('set the prerelease status')
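# SemVer marks pre-releases with a hyphenated suffix (e.g. 1.2.3-rc.1), so any
# '-' in the version string indicates a prerelease.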
is_prerelease = '-' in os.environ['SR_VERSION']
info('pre-release = {}'.format(is_prerelease))
with open(os.environ['GITHUB_OUTPUT'], 'a') as gho:
gho.write("prerelease={}\n".format(str(is_prerelease).lower()))
manual-edit-release-notes:
name: Manual editing of GitHub release notes
needs:
- draft-release
runs-on: ubuntu-22.04
environment: Github Release
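# The 'Github Release' environment is presumably protected by a required-reviewer
# rule, so this job holds the pipeline until a maintainer approves, leaving time
# to hand-edit the draft release notes before images and charts are published.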
steps:
- run: echo "Approved Github release notes"
containers:
name: Containers
needs:
- draft-release
- manual-edit-release-notes
runs-on: ubuntu-22.04
steps:
- name: Configure git
run: |
set -euo pipefail
git config --global user.name '${{ env.COMMIT_NAME }}'
git config --global user.email '${{ env.COMMIT_EMAIL }}'
- name: Checkout Repository
uses: actions/checkout@v4
with:
path: repository
persist-credentials: false
- name: Cache for chart-releaser
id: cache-cr
uses: actions/cache@v3
with:
path: bin/cr
key: ${{ runner.os }}-cr-${{ env.VERSION_CHARTRELEASER }}
- name: Set up chart-releaser
if: steps.cache-cr.outputs.cache-hit != 'true'
run: |
set -euo pipefail
[ -d bin ] || mkdir bin
URL='https://github.com/helm/chart-releaser/releases/download/v${{ env.VERSION_CHARTRELEASER }}/chart-releaser_${{ env.VERSION_CHARTRELEASER }}_linux_amd64.tar.gz'
curl -sSL "${URL}" | tar xz -C bin cr
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
uses: docker/login-action@v3
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Retrieve assets from artifact
uses: actions/download-artifact@v3
with:
name: binaries
path: assets
- name: Build container images from binary assets
id: build-oci
shell: python
env:
PYTHONPATH: ${{ github.workspace }}/repository/.github
WORKSPACE: ${{ github.workspace }}
REPOSITORY: ${{ github.repository }}
VERSION_OCI_BUSYBOX: ${{ env.VERSION_OCI_BUSYBOX }}
VERSION_OCI_ALPINE: ${{ env.VERSION_OCI_ALPINE }}
VERSION: ${{ needs.draft-release.outputs.version }}
PRERELEASE: ${{ needs.draft-release.outputs.prerelease }}
REVISION: ${{ github.sha }}
run: |
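# For each base flavour (busybox, alpine, scratch) this builds a multi-platform
# manifest with buildah: one working container per architecture, the matching
# prebuilt binary copied in from the assets artifact, then the manifest list is
# pushed to Docker Hub and Quay. The busybox flavour also receives the
# unsuffixed tags.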
from lib import *
from datetime import datetime
import os, tarfile
header('preparing the build matrix')
base_images = {
'busybox': os.environ['VERSION_OCI_BUSYBOX'],
'alpine': os.environ['VERSION_OCI_ALPINE'],
'scratch': None,
}
info('base images to build from:')
for image, tag in base_images.items():
info(' {}{}'.format(image, ':{}'.format(tag) if tag else ''))
base_archs = ('amd64', 'arm64', 'riscv64')
info('architectures to build for:')
for arch in base_archs:
info(' {}'.format(arch))
version = os.environ['VERSION']
prerelease = os.environ['PRERELEASE']
assert_length_above((
(version, 0),
(prerelease, 0),
))
prerelease = prerelease.lower() == 'true'
for base_image, base_tag in base_images.items():
header('{}: create the multi-platform manifest'.format(base_image))
manifest_name = 'x509-certificate-exporter-{}'.format(base_image)
manifest_id = run('buildah', 'manifest', 'create', manifest_name, capture_output=True).strip()
info('name = {}'.format(manifest_name))
info('id = {}'.format(manifest_id))
run('buildah', 'inspect', '--type', 'manifest', manifest_name)
for container_arch in base_archs:
header('{}: assembling image for: {}'.format(base_image, container_arch))
if base_tag:
# FIXME exceptions for RISC-V
# use 'edge' with Alpine
# https://gitlab.alpinelinux.org/alpine/aports/-/issues/13269
if container_arch == 'riscv64' and base_image == 'alpine':
container_base = 'docker://{}:edge'.format(base_image)
else:
container_base = 'docker://{}:{}'.format(base_image, base_tag)
else:
container_base = base_image
action('create a working container')
container_os = 'linux'
info('os = {}'.format(container_os))
info('arch = {}'.format(container_arch))
info('base = {}'.format(container_base))
container_id = run('buildah', 'from', '--arch', container_arch,
'--os', container_os, container_base, capture_output=True).strip()
info('container id = {}'.format(container_id))
action('update image configuration')
build_time = datetime.utcnow().isoformat(timespec='seconds')
info('build time = {}'.format(build_time))
run('buildah', 'config', '--add-history',
'--author', 'Enix <contact@enix.fr>',
'--label', 'org.opencontainers.image.title=X.509 Certificate Exporter',
'--label', 'org.opencontainers.image.description=A Prometheus exporter for certificates focusing on expiration monitoring.',
'--label', 'org.opencontainers.image.url=https://github.com/enix/x509-certificate-exporter',
'--label', 'org.opencontainers.image.documentation=https://github.com/enix/x509-certificate-exporter#readme',
'--label', 'org.opencontainers.image.sources=https://github.com/enix/sandbox-x509-certificate-exporter/blob/test-github-actions/.github/workflows/release.yml',
'--label', 'org.opencontainers.image.authors=Enix <contact@enix.fr>',
'--label', 'org.opencontainers.image.licenses=MIT',
'--label', 'org.opencontainers.image.version={}'.format(version),
'--label', 'org.opencontainers.image.revision={}'.format(os.environ['REVISION']),
'--label', 'org.opencontainers.image.created={}'.format(build_time),
'--os', container_os,
'--arch', container_arch,
'--workingdir', '/',
'--entrypoint', '["/x509-certificate-exporter"]',
'--cmd', '',
'--port', '9793/tcp',
container_id)
action('extract binary from assets')
asset_path = os.path.join(os.environ['WORKSPACE'], 'assets',
'x509-certificate-exporter-{}-{}.tar.gz'.format(container_os, container_arch))
binary_path = os.path.join(os.environ['WORKSPACE'], 'x509-certificate-exporter')
with tarfile.open(asset_path, 'r:gz') as tar:
with open(binary_path, 'xb') as binfile:
bin_data = tar.extractfile('x509-certificate-exporter')
binfile.write(bin_data.read())
action('copy binary and delete source')
run('buildah', 'copy', '--add-history', '--chmod', '0555', '--chown', '0:0',
container_id, binary_path, '/x509-certificate-exporter')
os.unlink(binary_path)
action('commit image')
image_id = run('buildah', 'commit', '--rm', '--format', 'docker',
'--manifest', manifest_name, container_id, capture_output=True).strip()
info('image id = {}'.format(image_id))
header('{}: inspect the multi-platform manifest'.format(base_image))
run('buildah', 'inspect', '--type', 'manifest', manifest_name)
header('{}: preparing push parameters'.format(base_image))
repositories = (
'docker.io/{}'.format(os.environ['REPOSITORY']),
'quay.io/{}'.format(os.environ['REPOSITORY']),
)
tags = [ version ]
if not prerelease:
info('not a prerelease version, adding the "latest" tag')
tags.append('latest')
info('tags to push to:')
destinations = []
for repo in repositories:
for tag in tags:
destinations.append('{}:{}-{}'.format(repo, tag, base_image))
# use Busybox as default image
if base_image == 'busybox':
destinations.append('{}:{}'.format(repo, tag))
for destination in destinations:
info(' {}'.format(destination))
header('{}: push manifest and images'.format(base_image))
for destination in destinations:
action('pushing: {}'.format(destination))
run('buildah', 'manifest', 'push', '--all', manifest_name, 'docker://{}'.format(destination))
header('{}: removing manifest'.format(base_image))
run('buildah', 'manifest', 'rm', manifest_name)
header('housekeeping')
action('purge all containers')
run('buildah', 'rm', '--all')
action('purge all images')
run('buildah', 'rmi', '--all', '--force')
- name: Convert GitHub changelog for Artifacthub
shell: python
env:
GITHUB_CHANGELOG_URL: https://github.com/enix/x509-certificate-exporter/releases/download/v${{ needs.draft-release.outputs.version }}/CHANGELOG.md
run: |
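# Fetches the CHANGELOG.md attached to the GitHub release and converts its
# conventional-changelog (Angular preset) sections into the entry list expected
# by the artifacthub.io/changes annotation, exporting the resulting YAML through
# GITHUB_ENV for the chart-releaser step below.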
import os, yaml, re, requests
# Based on:
# - https://github.com/conventional-changelog/conventional-changelog/blob/master/packages/conventional-changelog-angular/writer-opts.js
# - https://github.com/artifacthub/hub/blob/master/web/src/layout/package/changelog/Content.tsx
header_to_kind = {
'Features': { 'kind': 'added', 'prefix': '' },
'Bug Fixes': { 'kind': 'fixed', 'prefix': '' },
'Reverts': { 'kind': 'removed', 'prefix': 'revert' },
'Performance Improvements': { 'kind': 'changed', 'prefix': 'perf' },
'BREAKING CHANGES': { 'kind': 'changed', 'prefix': 'BREAKING' },
# sections below won't show up in conventional-changelog output unless they carry 'BREAKING' notes
'Documentation': { 'kind': 'changed', 'prefix': 'docs' },
'Styles': { 'kind': 'changed', 'prefix': 'style' },
'Code Refactoring': { 'kind': 'changed', 'prefix': 'refactor' },
'Tests': { 'kind': 'changed', 'prefix': 'test' },
'Build System': { 'kind': 'changed', 'prefix': 'build' },
'Continuous Integration': { 'kind': 'changed', 'prefix': 'ci' },
}
extract_log = re.compile(
r'\* '
r'(?:\*\*(?P<scope>.+):\*\* )?'
r'(?P<description>.*?)'
r'(?: \(\[[0-9a-f]+\]\((?P<commit>[^)]*)\)\)'
r'(?:, closes (?P<issues>.*))?'
r')?')
extract_issues = re.compile(
r' ?(?:(?:#[0-9]+)|(?:\[#(?P<id>[0-9]+)\]\((?P<url>[^)]*)\)))+')
entries = []
changelog = requests.get(os.environ['GITHUB_CHANGELOG_URL']).text
print(changelog)
mapping = None
for line in changelog.splitlines():
if line.startswith('### '):
header = line[4:].strip()
mapping = header_to_kind.get(header, None)
continue
if mapping and line.startswith('*'):
match = extract_log.fullmatch(line)
if match is None:
raise ValueError('failed to extract log line: {}'.format(line))
scope = match.group('scope')
if scope == '*':
scope = None
kind = mapping.get('kind')
description = match.group('description')
desc_prefix = mapping.get('prefix')
if desc_prefix:
if scope:
description = '{}({}): {}'.format(desc_prefix, scope, description)
else:
description = '{}: {}'.format(desc_prefix, description)
else:
if scope == 'security':
kind = 'security'
elif scope:
description = '{}: {}'.format(scope, description)
links = []
commit_url = match.group('commit')
if commit_url:
links.append({
'name': 'GitHub commit',
'url': commit_url
})
issues = match.group('issues')
if issues:
for issue in extract_issues.finditer(issues):
links.append({
'name': 'GitHub issue #{}'.format(issue.group('id')),
'url': issue.group('url')
})
entry = {
'kind': kind,
'description': description
}
if len(links):
entry['links'] = links
entries.append(entry)
if len(entries):
output = yaml.dump(entries)
else:
output = ''
print(output)
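# GITHUB_ENV accepts multi-line values via the delimiter syntax (NAME<<EOF ... EOF).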
with open(os.environ['GITHUB_ENV'], 'a') as outfile:
outfile.write('ARTIFACTHUB_CHANGELOG<<EOF\n')
outfile.write(output)
outfile.write('EOF\n')
# TODO
# - OCI releases?
# - signing?
- name: Run chart-releaser
shell: python
env:
WORKSPACE: ${{ github.workspace }}
PYTHONPATH: ${{ github.workspace }}/repository/.github
CR_PATH: ${{ github.workspace }}/bin/cr
CR_TOKEN: ${{ secrets.CHARTSREPO_GITHUB_TOKEN }}
CHART_NAME: x509-certificate-exporter
VERSION: ${{ needs.draft-release.outputs.version }}
PRERELEASE: ${{ needs.draft-release.outputs.prerelease }}
ARTIFACTHUB_CHANGELOG: ${{ env.ARTIFACTHUB_CHANGELOG }}
run: |
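# Updates Chart.yaml (version, appVersion, Artifact Hub annotations), copies the
# chart into a fresh clone of enix/helm-charts, then uses cr to package it,
# upload it as a release on that repository, and push an updated index.yaml.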
from lib import *
import os, yaml, shutil
chart_path = os.path.join(os.environ['WORKSPACE'], 'repository', 'deploy', 'charts', os.environ['CHART_NAME'])
os.chdir(chart_path)
header('prepare chart manifest')
version = os.environ['VERSION']
assert_length_above((
(version, 0),
))
is_prerelease = os.environ['PRERELEASE']
is_security_update = 'false' # FIXME
assert_in((
(is_prerelease, ('true', 'false')),
(is_security_update, ('true', 'false')),
))
info('version = {}'.format(version))
info('prerelease = {}'.format(is_prerelease))
info('security fix = {}'.format(is_security_update))
manifest_file = 'Chart.yaml'
manifest = yaml.safe_load(open(manifest_file, 'r'))
manifest.update({
'version': version,
'appVersion': version,
})
manifest['annotations'].update({
'artifacthub.io/prerelease': is_prerelease,
'artifacthub.io/containsSecurityUpdates': is_security_update,
})
changelog = os.environ['ARTIFACTHUB_CHANGELOG']
if len(changelog):
manifest['annotations'].update({
'artifacthub.io/changes': changelog,
})
open(manifest_file, 'w').write(yaml.dump(manifest))
header('inspect files to be released')
for yaml_file in (manifest_file,):
action('YAML: {}'.format(yaml_file))
print(yaml.safe_load(open(yaml_file, 'r')))
header('release the chart')
action('clone helm charts repository')
charts_repo = os.path.join(os.environ['WORKSPACE'], 'enix-charts')
run('git', 'clone', 'https://github.com/enix/helm-charts', charts_repo)
action('copy chart files')
repo_chart_path = os.path.join(charts_repo, 'charts', os.environ['CHART_NAME'])
shutil.copytree(chart_path, repo_chart_path, symlinks=True, dirs_exist_ok=False)
action('create the chart package')
run(os.environ['CR_PATH'], 'package', repo_chart_path, cwd=charts_repo)
action('upload the chart')
run(os.environ['CR_PATH'], 'upload', '--skip-existing', '--owner', 'enix', '--git-repo', 'helm-charts', cwd=charts_repo)
action('update repository index')
run(os.environ['CR_PATH'], 'index', '--push', '-i', 'index.yaml', '--owner', 'enix', '--git-repo', 'helm-charts', cwd=charts_repo)
cleanup:
name: Cleanup
needs:
- draft-release
- containers
runs-on: ubuntu-22.04
steps:
- name: Delete assets artifact
uses: geekyeggo/delete-artifact@v4
with:
name: binaries