Implement the normalisation and comparison of openshift config items

This commit is contained in:
Phil Elson 2021-07-16 09:45:22 +02:00
parent b74ffebeb2
commit 74ee8b5241
5 changed files with 149 additions and 26 deletions

View file

@@ -10,23 +10,29 @@ variables:
PY_VERSION: "3.6" # This is what we have running in OpenShift currently.
## A full installation of CARA, tested with pytest.
#test_install:
# extends: .acc_py_full_test
#
#
## A development installation of CARA tested with pytest.
#test_dev:
# extends: .acc_py_dev_test
# A full installation of CARA, tested with pytest.
test_install:
extends: .acc_py_full_test
# A development installation of CARA tested with pytest.
test_openshift_config:
test_dev:
extends: .acc_py_dev_test
# A development installation of CARA tested with pytest.
.test_openshift_config:
variables:
CARA_INSTANCE: 'test-cara'
BRANCH: 'live/test-cara'
rules:
- if: '$OPENSHIFT_CONFIG_CHECKER_TOKEN_TEST_CARA && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME == ${BRANCH}'
allow_failure: false # The live/test-cara must represent what is deployed.
- if: '$OPENSHIFT_CONFIG_CHECKER_TOKEN_TEST_CARA'
allow_failure: true # Anything other than live/test-cara can fail.
image: registry.cern.ch/docker.io/mambaorg/micromamba
before_script:
- micromamba create --yes -p $HOME/env python=3.9 wget -c conda-forge
- micromamba create --yes -p $HOME/env python=3.9 ruamel.yaml wget -c conda-forge
- export PATH=$HOME/env/bin/:$PATH
- wget https://github.com/openshift/origin/releases/download/v3.11.0/openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz
- tar xzf ./openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz
@@ -35,10 +41,12 @@ test_openshift_config:
script:
- cd ./app-config/openshift
- oc login openshift-dev.cern.ch --token="${OPENSHIFT_CONFIG_CHECKER_TOKEN_TEST_CARA}"
- python ./fetch-config.py test-cara --output-directory ./test-cara/actual
- python ./generate-config.py test-cara --output-directory ./test-cara/expected
- python ./config-fetch.py test-cara --output-directory ./test-cara/actual
- python ./config-generate.py test-cara --output-directory ./test-cara/expected
- python ./config-normalise.py ./test-cara/actual ./test-cara/actual-normed
- python ./config-normalise.py ./test-cara/expected ./test-cara/expected-normed
- diff -u ./test-cara/actual-normed/ ./test-cara/expected-normed/
# - pytest ./test_config.py --arg test-cara
artifacts:
paths:
- ./app-config/openshift/test-cara/actual

View file

@@ -194,10 +194,12 @@ If you need to create the application in a new project, run:
```console
$ cd app-config/openshift
$ oc process -f application.yaml --param PROJECT_NAME='test-cara' --param GIT_BRANCH='live/test-cara' | oc create -f -
$ oc process -f routes.yaml --param HOST='test-cara.web.cern.ch' | oc create -f -
$ oc process -f configmap.yaml | oc create -f -
$ oc process -f services.yaml | oc create -f -
$ oc process -f route.yaml --param HOST='test-cara.web.cern.ch' | oc create -f -
$ oc process -f imagestreams.yaml | oc create -f -
$ oc process -f buildconfig.yaml --param GIT_BRANCH='live/test-cara' | oc create -f -
$ oc process -f deploymentconfig.yaml --param PROJECT_NAME='test-cara' | oc create -f -
```
Then, create the webhook secret to be able to trigger automatic builds from GitLab.
@@ -272,7 +274,7 @@ $ cd app-config/openshift
$ oc process -f configmap.yaml | oc replace -f -
$ oc process -f services.yaml | oc replace -f -
$ oc process -f route.yaml --param HOST='test-cara.web.cern.ch' | oc replace -f -
$ oc process -f routes.yaml --param HOST='test-cara.web.cern.ch' | oc replace -f -
$ oc process -f imagestreams.yaml | oc replace -f -
$ oc process -f buildconfig.yaml --param GIT_BRANCH='live/test-cara' | oc replace -f -
$ oc process -f deploymentconfig.yaml --param PROJECT_NAME='test-cara' | oc replace -f -
@@ -280,4 +282,3 @@ $ oc process -f deploymentconfig.yaml --param PROJECT_NAME='test-cara' | oc repl
Be aware that if you change/replace the **route** of the PROD instance,
it will lose the annotation to be exposed outside CERN (not committed in this repo).

View file

@@ -13,7 +13,7 @@ def configure_parser(parser: argparse.ArgumentParser) -> None:
help="Pick the instance for which you want to fetch the config",
)
parser.add_argument(
"--output-directory", default='config',
"-o", "--output-directory", default='config',
help="Location to put the config files",
)
@@ -36,8 +36,8 @@ def fetch_config(output_directory: pathlib.Path):
output_directory.mkdir(exist_ok=True, parents=True)
for component in ['routes', 'configmap', 'services', 'imagestreams', 'buildconfig', 'deploymentconfig']:
with (output_directory / f'{component}.json').open('wt') as fh:
cmd = ['oc', 'get', '--export', '-o', 'json', component]
with (output_directory / f'{component}.yaml').open('wt') as fh:
cmd = ['oc', 'get', '--export', '-o', 'yaml', component]
print(f'Running: {" ".join(cmd)}')
subprocess.run(cmd, stdout=fh, check=True)
print(f'Config in: {output_directory.absolute()}')
@@ -53,7 +53,6 @@ def handler(args: argparse.ArgumentParser) -> None:
actual_login_server = get_oc_server()
if actual_login_server != login_server:
print(actual_login_server)
print(f'\nPlease login to the correct openshift server with: \n\n oc login {login_server}\n', file=sys.stderr)
sys.exit(1)

View file

@@ -1,7 +1,6 @@
import argparse
import pathlib
import subprocess
import sys
import typing
@@ -13,7 +12,7 @@ def configure_parser(parser: argparse.ArgumentParser) -> None:
help="Pick the instance for which you want to generate the config",
)
parser.add_argument(
"--output-directory", default='config',
"-o", "--output-directory", default='config',
help="Location to put the config files",
)
@@ -22,14 +21,13 @@ def generate_config(output_directory: pathlib.Path, project_name: str, hostname:
output_directory.mkdir(exist_ok=True, parents=True)
def oc_process(component_name: str, context: typing.Optional[dict] = None):
cmd = ['oc', 'process', '--local', '-f', f'{component_name}.yaml']
cmd = ['oc', 'process', '--local', '-f', f'{component_name}.yaml', '-o', 'yaml']
for ctx_name, ctx_value in (context or {}).items():
cmd.extend(['--param', f'{ctx_name}={ctx_value}'])
with (output_directory / f'{component_name}.json').open('wt') as fh:
with (output_directory / f'{component_name}.yaml').open('wt') as fh:
print(f'Running: {" ".join(cmd)}')
subprocess.run(cmd, stdout=fh, check=True)
# oc_process('route', oc_process + ['route.yaml', '--param', f'HOST={hostname}'])
oc_process('routes', context={'HOST': hostname})
oc_process('configmap')
oc_process('services')

View file

@@ -0,0 +1,117 @@
import argparse
import pathlib
import ruamel.yaml
def configure_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the normaliser's description, handler and arguments to *parser*."""
    parser.description = (
        "Normalise openshift config files (by sorting and removing ephemeral values)"
    )
    # main() dispatches to this once the arguments have been parsed.
    parser.set_defaults(handler=handler)
    # Positional argument names keep their dashes, so the handler reads them
    # back with getattr()/vars() rather than attribute access.
    positionals = (
        ("config-directory",
         "The directory from which to find yaml files"),
        ("output-directory",
         "The directory to put normalized files (can be the same as config-directory)"),
    )
    for arg_name, arg_help in positionals:
        parser.add_argument(arg_name, help=arg_help)
def clean_ephemeral_config(config: dict) -> dict:
    """Return a copy of an ``oc``-style config list with ephemeral values removed.

    Strips the fields that legitimately differ between a freshly generated
    config and one fetched from a live cluster (timestamps, image hashes,
    cluster-assigned IPs, last-trigger bookkeeping, ...) so that the two can
    be meaningfully diffed.

    The input ``config`` is not modified.
    """
    import copy

    # A deep copy: the original (shallow) copy still mutated the caller's
    # nested dicts, which contradicted the copy-then-return contract.
    config = copy.deepcopy(config)
    # The top-level (List) metadata is entirely ephemeral.
    config.get('metadata', {}).clear()
    METADATA_TO_PRESERVE = ['labels', 'name']
    for item in config['items']:
        item.pop('status', None)
        for key in list(item['metadata'].keys()):
            if key not in METADATA_TO_PRESERVE:
                del item['metadata'][key]
        # Services get a cluster-assigned IP that is not part of the template.
        item.get('spec', {}).pop('clusterIP', None)
        if item['kind'] == 'BuildConfig':
            for trigger in item.get('spec', {}).get('triggers', []):
                trigger.get('imageChange', {}).pop('lastTriggeredImageID', None)
        if item['kind'] == 'DeploymentConfig':
            item['spec'].get('template', {}).get('metadata', {}).pop('creationTimestamp', None)
            for container in item['spec'].get('template', {}).get('spec', {}).get('containers', []):
                # Drop the specific image name (and hash).
                container.pop('image', None)
            for trigger in item['spec'].get('triggers', []):
                trigger.get('imageChangeParams', {}).pop('lastTriggeredImage', None)
        # Drop the template part of the config for now.
        # TODO: Remove this constraint to ensure our deployments reflect the fact that they are templated.
        r = item['metadata'].get('labels', {}).pop('template', None)
        if r is not None and not item['metadata']['labels']:
            # Remove the empty labels dict if there is nothing left after popping the template item.
            item['metadata'].pop('labels')
    return config
def deep_sort(item):
    """Recursively order *item*: dict keys are sorted; lists are sorted by
    their elements' metadata/name (falling back to the str representation).

    Scalars are returned unchanged.
    """
    if isinstance(item, dict):
        # Rebuild the mapping in key order (dict preserves insertion order).
        ordered = {}
        for key in sorted(item):
            ordered[key] = deep_sort(item[key])
        return ordered
    if isinstance(item, list):
        def sort_key(value):
            # Prefer metadata/name; anything else sorts by its str form.
            if not isinstance(value, dict):
                return str(value)
            return value.get('metadata', {}).get('name', '') or str(value)
        return sorted((deep_sort(element) for element in item), key=sort_key)
    return item
def normalise_config(input_directory: pathlib.Path, output_directory: pathlib.Path):
    """Normalise every ``*.yaml`` file in *input_directory*, writing the
    cleaned and deep-sorted result to a file of the same name in
    *output_directory* (created if necessary).
    """
    output_directory.mkdir(exist_ok=True, parents=True)
    yaml = ruamel.yaml.YAML(typ='safe')
    for file in sorted(input_directory.glob('*.yaml')):
        with file.open('rt') as fh:
            loaded = yaml.load(fh)
        # Strip ephemeral values, then impose a canonical ordering so that
        # two normalised configs can be compared with a plain diff.
        normalised = deep_sort(clean_ephemeral_config(loaded))
        destination = output_directory / file.name
        with destination.open('wt') as fh:
            yaml.dump(normalised, fh)
        print(f'Normalised {file.name} in {destination}')
    print(f'Config in: {output_directory.absolute()}')
def handler(args: argparse.ArgumentParser) -> None:
    """Entry point wired up by configure_parser: run the normalisation."""
    # The positional argument names contain dashes, so they cannot be read
    # as attributes; go through the namespace dict instead.
    options = vars(args)
    normalise_config(
        pathlib.Path(options['config-directory']),
        pathlib.Path(options['output-directory']),
    )
def main():
    """Build the argument parser and dispatch to the configured handler."""
    parser = argparse.ArgumentParser()
    configure_parser(parser)
    parsed = parser.parse_args()
    parsed.handler(parsed)


if __name__ == '__main__':
    main()