Merge branch 'master' into feature/report_reformulation

This commit is contained in:
Luis Aleixo 2021-07-30 12:45:23 +02:00
commit f7eda612f8
28 changed files with 868 additions and 265 deletions

View file

@ -2,3 +2,6 @@ venv
env*
prototypes
support
**/Dockerfile
_*

View file

@ -7,7 +7,7 @@ include:
variables:
project_name: cara
PY_VERSION: "3.6" # This is what we have running in OpenShift currently.
PY_VERSION: "3.9"
# A full installation of CARA, tested with pytest.
@ -20,14 +20,97 @@ test_dev:
extends: .acc_py_dev_test
# A development installation of CARA tested with pytest.
.test_openshift_config:
rules:
- if: '$OC_TOKEN && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME == $BRANCH'
allow_failure: false # The branch must represent what is deployed.
- if: '$OC_TOKEN && $CI_MERGE_REQUEST_EVENT_TYPE != "detached"'
allow_failure: true # Anything other than the branch may fail without blocking the pipeline.
image: registry.cern.ch/docker.io/mambaorg/micromamba
before_script:
- micromamba create --yes -p $HOME/env python=3.9 ruamel.yaml wget -c conda-forge
- export PATH=$HOME/env/bin/:$PATH
- wget https://github.com/openshift/origin/releases/download/v3.11.0/openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz
- tar xzf ./openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz
- mv openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit/oc $HOME/env/bin/
script:
- cd ./app-config/openshift
- oc login ${OC_SERVER} --token="${OC_TOKEN}"
- python ./config-fetch.py ${CARA_INSTANCE} --output-directory ./${CARA_INSTANCE}/actual
- python ./config-generate.py ${CARA_INSTANCE} --output-directory ./${CARA_INSTANCE}/expected
- python ./config-normalise.py ./${CARA_INSTANCE}/actual ./${CARA_INSTANCE}/actual-normed
- python ./config-normalise.py ./${CARA_INSTANCE}/expected ./${CARA_INSTANCE}/expected-normed
- diff -u ./${CARA_INSTANCE}/actual-normed/ ./${CARA_INSTANCE}/expected-normed/
artifacts:
paths:
- ./app-config/openshift/${CARA_INSTANCE}/actual
- ./app-config/openshift/${CARA_INSTANCE}/expected
check_openshift_config_test-cara:
extends: .test_openshift_config
variables:
CARA_INSTANCE: 'test-cara'
BRANCH: 'live/test-cara'
OC_SERVER: openshift-dev.cern.ch
OC_TOKEN: "${OPENSHIFT_CONFIG_CHECKER_TOKEN_TEST_CARA}"
# A development installation of CARA tested with pytest.
test_dev-39:
variables:
PY_VERSION: "3.9"
extends: .acc_py_dev_test
.image_builder:
# Build and push images to the openshift instance, which automatically triggers an application re-deployment.
stage: deploy
image:
# Based on guidance at https://gitlab.cern.ch/gitlabci-examples/build_docker_image.
name: gitlab-registry.cern.ch/ci-tools/docker-image-builder
entrypoint: [""]
rules:
- if: '$OPENSHIFT_DOCKER_TOKEN_TEST != "" && $CI_COMMIT_BRANCH == "live/test-cara"'
variables:
DOCKER_REGISTRY: "${OPENSHIFT_DOCKER_REGISTRY_TEST}"
DOCKER_TOKEN: "${OPENSHIFT_DOCKER_TOKEN_TEST}"
- if: '$OPENSHIFT_DOCKER_TOKEN_PROD != "" && $CI_COMMIT_BRANCH == "master"'
variables:
DOCKER_REGISTRY: "${OPENSHIFT_DOCKER_REGISTRY_PROD}"
DOCKER_TOKEN: "${OPENSHIFT_DOCKER_TOKEN_PROD}"
script:
- echo "{\"auths\":{\"$DOCKER_REGISTRY\":{\"auth\":\"$DOCKER_TOKEN\"}}}" > /kaniko/.docker/config.json
- /kaniko/executor --context $CI_PROJECT_DIR/$DOCKER_CONTEXT_DIRECTORY --dockerfile $CI_PROJECT_DIR/$DOCKERFILE_DIRECTORY/Dockerfile --destination $DOCKER_REGISTRY/$IMAGE_NAME:latest
auth-service-image_builder:
extends:
- .image_builder
variables:
IMAGE_NAME: auth-service
DOCKERFILE_DIRECTORY: app-config/auth-service
DOCKER_CONTEXT_DIRECTORY: app-config/auth-service
cara-webservice-image_builder:
extends:
- .image_builder
variables:
IMAGE_NAME: cara-webservice
DOCKERFILE_DIRECTORY: app-config/cara-webservice
DOCKER_CONTEXT_DIRECTORY: ""
trigger_build_on_openshift:
stage: deploy
rules:
- if: '$OPENSHIFT_BUILD_WEBHOOK_SECRET'
script:
- curl -X POST -k https://openshift.cern.ch:443/apis/build.openshift.io/v1/namespaces/cara/buildconfigs/cara-router/webhooks/${OPENSHIFT_BUILD_WEBHOOK_SECRET}/generic
- curl -X POST -k https://openshift.cern.ch:443/apis/build.openshift.io/v1/namespaces/cara/buildconfigs/cara-webservice/webhooks/${OPENSHIFT_BUILD_WEBHOOK_SECRET}/generic
- curl -X POST -k https://openshift.cern.ch:443/apis/build.openshift.io/v1/namespaces/cara/buildconfigs/auth-service/webhooks/${OPENSHIFT_BUILD_WEBHOOK_SECRET}/generic
deploy_to_test:
@ -36,8 +119,6 @@ deploy_to_test:
- if: '$CI_COMMIT_BRANCH == "live/test-cara" && $OPENSHIFT_TEST_BUILD_WEBHOOK_SECRET'
script:
- curl -X POST -k https://openshift-dev.cern.ch:443/apis/build.openshift.io/v1/namespaces/test-cara/buildconfigs/cara-router/webhooks/${OPENSHIFT_TEST_BUILD_WEBHOOK_SECRET}/generic
- curl -X POST -k https://openshift-dev.cern.ch:443/apis/build.openshift.io/v1/namespaces/test-cara/buildconfigs/cara-webservice/webhooks/${OPENSHIFT_TEST_BUILD_WEBHOOK_SECRET}/generic
- curl -X POST -k https://openshift-dev.cern.ch:443/apis/build.openshift.io/v1/namespaces/test-cara/buildconfigs/auth-service/webhooks/${OPENSHIFT_TEST_BUILD_WEBHOOK_SECRET}/generic
oci_calculator:
@ -45,10 +126,11 @@ oci_calculator:
stage: deploy
rules:
# Only run if branch is master (the default branch).
- if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
image:
name: gitlab-registry.cern.ch/ci-tools/docker-image-builder
entrypoint: [""]
script:
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CI_REGISTRY_IMAGE/calculator:latest

View file

@ -1,4 +1,4 @@
FROM python:3.6
FROM python:3.9
COPY ./ /opt/cara/src
RUN python -m venv /opt/cara/app

View file

@ -108,11 +108,10 @@ python -m cara.apps.calculator --prefix=/mycalc
```
pip install -e . # At the root of the repository
voila ./app/cara.ipynb
voila cara/apps/expert/cara.ipynb --port=8080
```
Then visit http://localhost:8080/calculator.
Then visit http://localhost:8080.
### Running the tests
@ -128,8 +127,8 @@ pytest ./cara
```
s2i build file://$(pwd) --copy --keep-symlinks --context-dir ./app-config/nginx/ centos/nginx-112-centos7 cara-nginx-app
s2i build file://$(pwd) --copy --keep-symlinks --context-dir ./ centos/python-36-centos7 cara-webservice
s2i build file://$(pwd) --copy --keep-symlinks --context-dir ./app-config/auth-service centos/python-36-centos7 auth-service
docker build . -f ./app-config/cara-webservice/Dockerfile -t cara-webservice
docker build ./app-config/auth-service -t auth-service
```
Get the client secret from the CERN Application portal for the `cara-test` app. See [CERN-SSO-integration](#CERN-SSO-integration) for more info.
@ -194,10 +193,12 @@ If you need to create the application in a new project, run:
```console
$ cd app-config/openshift
$ oc process -f application.yaml --param PROJECT_NAME='test-cara' --param GIT_BRANCH='live/test-cara' | oc create -f -
$ oc process -f routes.yaml --param HOST='test-cara.web.cern.ch' | oc create -f -
$ oc process -f configmap.yaml | oc create -f -
$ oc process -f services.yaml | oc create -f -
$ oc process -f route.yaml --param HOST='test-cara.web.cern.ch' | oc create -f -
$ oc process -f imagestreams.yaml | oc create -f -
$ oc process -f buildconfig.yaml --param GIT_BRANCH='live/test-cara' | oc create -f -
$ oc process -f deploymentconfig.yaml --param PROJECT_NAME='test-cara' | oc create -f -
```
Then, create the webhook secret to be able to trigger automatic builds from GitLab.
@ -211,6 +212,17 @@ $ oc create secret generic \
gitlab-cara-webhook-secret
```
For CI usage, we also suggest creating a service account:
```console
oc create sa gitlab-config-checker
```
Under ``Resources`` -> ``Membership`` enable the ``View`` role for this new service account.
To get this new user's authentication token go to ``Resources`` -> ``Secrets`` and locate the token in the newly
created secret associated with the user (in this case ``gitlab-config-checker-token-XXXX``).
### CERN SSO integration
The SSO integration uses OpenID credentials configured in [CERN Applications portal](https://application-portal.web.cern.ch/).
@ -258,11 +270,14 @@ If you need to **update** existing configuration, then modify this repository an
```console
$ cd app-config/openshift
$ oc process -f application.yaml --param PROJECT_NAME='test-cara' --param GIT_BRANCH='live/test-cara' | oc replace -f -
$ oc process -f configmap.yaml | oc replace -f -
$ oc process -f services.yaml | oc replace -f -
$ oc process -f route.yaml --param HOST='test-cara.web.cern.ch' | oc replace -f -
$ oc process -f routes.yaml --param HOST='test-cara.web.cern.ch' | oc replace -f -
$ oc process -f imagestreams.yaml | oc replace -f -
$ oc process -f buildconfig.yaml --param GIT_BRANCH='live/test-cara' | oc replace -f -
$ oc process -f deploymentconfig.yaml --param PROJECT_NAME='test-cara' | oc replace -f -
```
Be aware that if you change/replace the **route** of the PROD instance, it will loose the annotation to be exposed outside CERN (not committed in this repo).
Be aware that if you change/replace the **route** of the PROD instance,
it will lose the annotation to be exposed outside CERN (not committed in this repo).

View file

@ -0,0 +1 @@
Dockerfile

View file

@ -0,0 +1,25 @@
FROM condaforge/mambaforge as conda
RUN mamba create --yes -p /opt/app python=3.9
COPY . /opt/app-source
RUN conda run -p /opt/app python -m pip install /opt/app-source
RUN cd /opt/app \
&& find -name '*.a' -delete \
&& rm -rf /opt/app/conda-meta \
&& rm -rf /opt/app/include \
&& find -name '__pycache__' -type d -exec rm -rf '{}' '+' \
&& rm -rf /opt/app/lib/python*/site-packages/pip /opt/app/lib/python*/idlelib /opt/app/lib/python*/ensurepip \
/opt/app/bin/x86_64-conda-linux-gnu-ld \
/opt/app/bin/sqlite3 \
/opt/app/bin/openssl \
/opt/app/share/terminfo \
&& find /opt/app/lib/ -name 'tests' -type d -exec rm -rf '{}' '+' \
&& find /opt/app/lib -name '*.pyx' -delete \
;
FROM debian
COPY --from=conda /opt/app /opt/app
CMD [ \
"/opt/app/bin/python", "-m", "auth_service" \
]

View file

@ -1 +0,0 @@
python -m auth_service

View file

@ -11,6 +11,7 @@ import typing
import aiohttp
from keycloak.aio.realm import KeycloakRealm
import tornado.ioloop
import tornado.log
import tornado.web
@ -161,6 +162,7 @@ class MainHandler(BaseHandler):
def make_app():
tornado.log.enable_pretty_logging()
return tornado.web.Application(
[
(r"/", MainHandler),

View file

@ -0,0 +1,31 @@
FROM condaforge/mambaforge as conda
RUN mamba create --yes -p /opt/app python=3.9
COPY . /opt/app-source
RUN cd /opt/app-source && conda run -p /opt/app python -m pip install -r ./requirements.txt .[app]
COPY app-config/cara-webservice/app.sh /opt/app/bin/cara-app.sh
RUN cd /opt/app \
&& find -name '*.a' -delete \
&& rm -rf /opt/app/conda-meta \
&& rm -rf /opt/app/include \
&& find -name '__pycache__' -type d -exec rm -rf '{}' '+' \
&& rm -rf /opt/app/lib/python*/site-packages/pip /opt/app/lib/python*/idlelib /opt/app/lib/python*/ensurepip \
/opt/app/bin/x86_64-conda-linux-gnu-ld \
/opt/app/bin/sqlite3 \
/opt/app/bin/openssl \
/opt/app/share/terminfo \
&& find /opt/app/lib/ -name 'tests' -type d -exec rm -rf '{}' '+' \
&& find /opt/app/lib -name '*.pyx' -delete \
;
FROM debian
COPY --from=conda /opt/app /opt/app
ENV PATH=/opt/app/bin/:$PATH
# Make a convenient link to the installed CARA package (i.e. a directory called cara in the CWD).
RUN CARA_INIT_FILE=$(/opt/app/bin/python -c "import cara; print(cara.__file__)") \
&& ln -s $(dirname $(dirname ${CARA_INIT_FILE})) /opt/site-packages \
&& ln -s /opt/site-packages/cara ./cara
CMD [ \
"cara-app.sh" \
]

View file

@ -18,6 +18,9 @@ if [[ "$APP_NAME" == "cara-webservice" ]]; then
python -m cara.apps.calculator "${args[@]}"
elif [[ "$APP_NAME" == "cara-voila" ]]; then
echo "Starting the voila service"
voila app/ --port=8080 --no-browser --base_url=/voila-server/ --Voila.tornado_settings="{'allow_origin': '*'}"
voila cara/apps/expert/ --port=8080 --no-browser --base_url=/voila-server/ --tornado_settings 'allow_origin=*'
else
echo "No APP_NAME specified"
exit 1
fi

View file

@ -0,0 +1,90 @@
---
kind: "Template"
apiVersion: "v1"
metadata:
name: "cara-application"
creationTimestamp: null
annotations:
description: "CARA build config OpenShift template."
tags: "cara-application"
labels:
template: "cara-application"
objects:
-
kind: BuildConfig
apiVersion: v1
metadata:
name: cara-router
labels:
template: "cara-application"
spec:
source:
type: Git
git:
ref: ${GIT_BRANCH}
uri: ${GIT_REPO}
contextDir: app-config/nginx
sourceSecret:
name: sshdeploykey
postCommit: {}
resources: {}
runPolicy: Serial
output:
to:
kind: ImageStreamTag
name: 'cara-router:latest'
strategy:
sourceStrategy:
from:
kind: ImageStreamTag
name: 'nginx:1.12'
namespace: openshift
type: Source
triggers:
- generic:
secretReference:
name: gitlab-cara-webhook-secret
type: Generic
nodeSelector: null
-
kind: BuildConfig
apiVersion: v1
metadata:
name: cara-webservice
labels:
template: "cara-application"
spec:
source:
type: Git
git:
ref: ${GIT_BRANCH}
uri: ${GIT_REPO}
sourceSecret:
name: sshdeploykey
postCommit: {}
resources: {}
runPolicy: Serial
output:
to:
kind: ImageStreamTag
name: 'cara-webservice:latest'
strategy:
sourceStrategy:
from:
kind: ImageStreamTag
name: 'python:3.6'
namespace: openshift
type: Source
triggers:
- generic:
secretReference:
name: gitlab-cara-webhook-secret
type: Generic
nodeSelector: null
parameters:
- name: GIT_REPO
description: The GIT repo URL
value: 'ssh://git@gitlab.cern.ch:7999/cara/cara.git'
- name: GIT_BRANCH
description: The name of the GIT branch to use when building the app, e.g. `live/test-cara` in TEST, `master` in prod
required: true

View file

@ -0,0 +1,72 @@
import argparse
import pathlib
import subprocess
import sys
import typing
def configure_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the config-fetch CLI arguments and dispatch handler to ``parser``."""
    parser.description = "Fetch the openshift config for CARA"
    # The positional instance selector comes first; the output option after.
    parser.add_argument(
        "instance",
        choices=['cara', 'test-cara'],
        help="Pick the instance for which you want to fetch the config",
    )
    parser.add_argument(
        "-o", "--output-directory",
        default='config',
        help="Location to put the config files",
    )
    # Register the module-level handler so main() can dispatch via args.handler.
    parser.set_defaults(handler=handler)
def get_oc_server() -> typing.Optional[str]:
    """Return the URL of the openshift server currently logged in to.

    Returns ``None`` when the user is not logged in, or when the ``oc``
    client cannot be run at all. The original implementation invoked
    ``oc whoami`` twice and only caught ``CalledProcessError``, so a missing
    ``oc`` binary raised ``FileNotFoundError`` despite the comment claiming
    that case was handled; a single guarded call fixes both issues.
    """
    try:
        result = subprocess.run(
            ['oc', 'whoami', '--show-server'],
            check=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
    except (subprocess.CalledProcessError, FileNotFoundError):
        # User not logged on, or oc command missing.
        return None
    return result.stdout.decode().strip()
def fetch_config(output_directory: pathlib.Path):
    """Export selected openshift objects as yaml files into ``output_directory``."""
    output_directory.mkdir(exist_ok=True, parents=True)
    components = (
        'routes', 'configmap', 'services',
        'imagestreams', 'buildconfig', 'deploymentconfig',
    )
    for component in components:
        destination = output_directory / f'{component}.yaml'
        with destination.open('wt') as fh:
            cmd = ['oc', 'get', '--export', '-o', 'yaml', component]
            print(f'Running: {" ".join(cmd)}')
            subprocess.run(cmd, stdout=fh, check=True)
    print(f'Config in: {output_directory.absolute()}')
def handler(args: argparse.Namespace) -> None:
    """Verify the oc login, switch to the right project and fetch its config."""
    # Map the instance name to its API server and openshift project.
    # (argparse ``choices`` restricts instance to these two values; any other
    # value would leave login_server unbound — NOTE(review): relies on that.)
    if args.instance == 'cara':
        login_server = 'https://openshift.cern.ch:443'
        project_name = 'cara'
    elif args.instance == 'test-cara':
        login_server = 'https://openshift-dev.cern.ch:443'
        project_name = 'test-cara'
    # Refuse to proceed against whichever server the user happens to be on.
    actual_login_server = get_oc_server()
    if actual_login_server != login_server:
        print(f'\nPlease login to the correct openshift server with: \n\n oc login {login_server}\n', file=sys.stderr)
        sys.exit(1)
    subprocess.run(['oc', 'project', project_name], stdout=subprocess.DEVNULL, check=True)
    fetch_config(pathlib.Path(args.output_directory))
def main():
    """Command-line entry point: parse arguments and dispatch to the handler."""
    parser = argparse.ArgumentParser()
    configure_parser(parser)
    args = parser.parse_args()
    # The handler is installed on the parser defaults by configure_parser().
    args.handler(args)
# Allow the module to be run as a script as well as imported as a library.
if __name__ == '__main__':
    main()

View file

@ -0,0 +1,63 @@
import argparse
import pathlib
import subprocess
import typing
def configure_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the config-generate CLI arguments and dispatch handler to ``parser``."""
    parser.description = "Generate the config files which can be later submitted to openshift"
    parser.add_argument(
        "instance",
        choices=['cara', 'test-cara'],
        help="Pick the instance for which you want to generate the config",
    )
    parser.add_argument(
        "-o", "--output-directory",
        default='config',
        help="Location to put the config files",
    )
    # Register the module-level handler so main() can dispatch via args.handler.
    parser.set_defaults(handler=handler)
def generate_config(output_directory: pathlib.Path, project_name: str, hostname: str, branch: str):
    """Render each openshift template with ``oc process --local`` into yaml files.

    One output file per template is written to ``output_directory``, using the
    same file name as the template itself.
    """
    output_directory.mkdir(exist_ok=True, parents=True)

    # Table of (template name, template parameters) pairs, processed in order.
    components: typing.List[typing.Tuple[str, typing.Optional[dict]]] = [
        ('routes', {'HOST': hostname}),
        ('configmap', None),
        ('services', None),
        ('imagestreams', None),
        ('buildconfig', {'GIT_BRANCH': branch}),
        ('deploymentconfig', {'PROJECT_NAME': project_name}),
    ]
    for component_name, context in components:
        cmd = ['oc', 'process', '--local', '-f', f'{component_name}.yaml', '-o', 'yaml']
        for ctx_name, ctx_value in (context or {}).items():
            cmd.extend(['--param', f'{ctx_name}={ctx_value}'])
        with (output_directory / f'{component_name}.yaml').open('wt') as fh:
            print(f'Running: {" ".join(cmd)}')
            subprocess.run(cmd, stdout=fh, check=True)
    print(f'Config in: {output_directory.absolute()}')
def handler(args: argparse.Namespace) -> None:
    """Pick the per-instance parameters and generate the config files."""
    # (argparse ``choices`` restricts instance to these two values; any other
    # value would leave the locals below unbound — NOTE(review): relies on that.)
    if args.instance == 'cara':
        project_name = 'cara'
        branch = 'master'
        hostname = 'cara.web.cern.ch'
    elif args.instance == 'test-cara':
        branch = 'live/test-cara'
        project_name = 'test-cara'
        hostname = 'test-cara.web.cern.ch'
    generate_config(pathlib.Path(args.output_directory), project_name, hostname, branch)
def main():
    """Command-line entry point: parse arguments and dispatch to the handler."""
    parser = argparse.ArgumentParser()
    configure_parser(parser)
    args = parser.parse_args()
    # The handler is installed on the parser defaults by configure_parser().
    args.handler(args)
# Allow the module to be run as a script as well as imported as a library.
if __name__ == '__main__':
    main()

View file

@ -0,0 +1,117 @@
import argparse
import pathlib
import ruamel.yaml
def configure_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the config-normalise CLI arguments and dispatch handler to ``parser``."""
    parser.description = "Normalise openshift config files (by sorting and removing ephemeral values)"
    # Two positional directories; their names contain dashes, so the handler
    # has to read them back with getattr().
    parser.add_argument(
        "config-directory",
        help="The directory from which to find yaml files",
    )
    parser.add_argument(
        "output-directory",
        help="The directory to put normalized files (can be the same as config-directory)",
    )
    # Register the module-level handler so main() can dispatch via args.handler.
    parser.set_defaults(handler=handler)
def clean_ephemeral_config(config: dict):
    """Strip server-generated (ephemeral) values from an exported openshift config.

    Removes object status, assigned cluster IPs, build/deploy trigger state and
    most metadata, so that two exports of logically identical configs compare
    equal. Returns a shallow copy of ``config``; note that the nested ``items``
    are cleaned *in place*, so the caller's nested structures are mutated too.
    """
    config = config.copy()
    # Drop the top-level export metadata entirely (cleared in place).
    config.get('metadata', []).clear()
    METADATA_TO_PRESERVE = ['labels', 'name']
    for item in config['items']:
        # Status is always server generated.
        item.pop('status', None)
        for key in list(item['metadata'].keys()):
            if key not in METADATA_TO_PRESERVE:
                del item['metadata'][key]
        # The cluster IP is assigned by openshift when the service is created.
        item.get('spec', {}).pop('clusterIP', None)
        if item['kind'] == 'BuildConfig':
            for trigger in item.get('spec', {}).get('triggers', []):
                trigger.get('imageChange', {}).pop('lastTriggeredImageID', None)
        if item['kind'] == 'DeploymentConfig':
            # (The original popped creationTimestamp twice; once is enough.)
            item['spec'].get('template', {}).get('metadata', {}).pop('creationTimestamp', None)
            for container in item['spec'].get('template', {}).get('spec', {}).get('containers', []):
                # Drop the specific image name (and hash).
                container.pop('image', None)
            for trigger in item['spec'].get('triggers', []):
                trigger.get('imageChangeParams', {}).pop('lastTriggeredImage', None)
        # Drop the template part of the config for now.
        # TODO: Remove this constraint to ensure our deployments reflect the fact that they are templated.
        r = item['metadata'].get('labels', {}).pop('template', None)
        if r is not None and not item['metadata']['labels']:
            # Remove the empty labels dict if there is nothing left after popping the template item.
            item['metadata'].pop('labels')
    return config
def deep_sort(item):
    """Recursively produce a deterministically ordered copy of ``item``.

    Mappings are rebuilt with their keys in sorted order; lists are sorted by
    the element's metadata/name when it is a dict (falling back to ``str``),
    otherwise by the element's string form. Scalars pass through unchanged.
    """
    if isinstance(item, list):
        def sort_key(value):
            # Prefer the openshift object name; fall back to the str form.
            if isinstance(value, dict):
                return value.get('metadata', {}).get('name', '') or str(value)
            return str(value)

        return sorted((deep_sort(element) for element in item), key=sort_key)
    if isinstance(item, dict):
        ordered = {}
        for key in sorted(item):
            ordered[key] = deep_sort(item[key])
        return ordered
    return item
def normalise_config(input_directory: pathlib.Path, output_directory: pathlib.Path):
    """Normalise every ``*.yaml`` file in ``input_directory`` into ``output_directory``.

    Each file is cleaned of ephemeral values and deep-sorted so that two
    exports of the same logical config can be compared with a plain ``diff``.
    The output file keeps the input file's name.
    """
    output_directory.mkdir(exist_ok=True, parents=True)
    # Sort so files are processed (and reported) in a stable order.
    files = sorted(input_directory.glob('*.yaml'))
    # Safe loader: no arbitrary tag construction from the exported config.
    yaml = ruamel.yaml.YAML(typ='safe')
    for file in files:
        with file.open('rt') as fh:
            content = yaml.load(fh)
        config = clean_ephemeral_config(content)
        config = deep_sort(config)
        destination = output_directory / file.name
        with destination.open('wt') as fh:
            yaml.dump(config, fh)
        print(f'Normalised {file.name} in {destination}')
    print(f'Config in: {output_directory.absolute()}')
def handler(args: argparse.Namespace) -> None:
    """Run the normalisation using the two positional directory arguments."""
    # The argument names contain dashes, so they are only reachable via getattr.
    normalise_config(
        pathlib.Path(getattr(args, 'config-directory')),
        pathlib.Path(getattr(args, 'output-directory')),
    )
def main():
    """Command-line entry point: parse arguments and dispatch to the handler."""
    parser = argparse.ArgumentParser()
    configure_parser(parser)
    args = parser.parse_args()
    # The handler is installed on the parser defaults by configure_parser().
    args.handler(args)
# Allow the module to be run as a script as well as imported as a library.
if __name__ == '__main__':
    main()

View file

@ -3,161 +3,26 @@
apiVersion: "v1"
metadata:
name: "cara-application"
creationTimestamp: null
annotations:
description: "CARA application OpenShift template."
tags: "cara-application"
labels:
template: "cara-application"
objects:
-
kind: BuildConfig
apiVersion: v1
metadata:
name: auth-service
spec:
source:
git:
ref: ${GIT_BRANCH}
uri: ${GIT_REPO}
contextDir: app-config/auth-service
sourceSecret:
name: sshdeploykey
output:
to:
kind: ImageStreamTag
name: 'auth-service:latest'
strategy:
sourceStrategy:
from:
kind: ImageStreamTag
name: 'python:3.6'
namespace: openshift
type: Source
triggers:
- imageChange:
type: ImageChange
- generic:
secretReference:
name: gitlab-cara-webhook-secret
type: Generic
-
kind: ImageStream
apiVersion: v1
metadata:
name: auth-service
-
kind: BuildConfig
apiVersion: v1
metadata:
name: cara-router
spec:
source:
git:
ref: ${GIT_BRANCH}
uri: ${GIT_REPO}
contextDir: app-config/nginx
sourceSecret:
name: sshdeploykey
output:
to:
kind: ImageStreamTag
name: 'cara-router:latest'
strategy:
sourceStrategy:
from:
kind: ImageStreamTag
name: 'nginx:1.12'
namespace: openshift
type: Source
triggers:
- generic:
secretReference:
name: gitlab-cara-webhook-secret
type: Generic
-
kind: ImageStream
apiVersion: v1
metadata:
name: cara-app
-
kind: BuildConfig
apiVersion: v1
metadata:
name: cara-router
spec:
source:
git:
ref: ${GIT_BRANCH}
uri: ${GIT_REPO}
contextDir: app-config/nginx
sourceSecret:
name: sshdeploykey
output:
to:
kind: ImageStreamTag
name: 'cara-router:latest'
strategy:
sourceStrategy:
from:
kind: ImageStreamTag
name: 'nginx:1.12'
namespace: openshift
type: Source
triggers:
- generic:
secretReference:
name: gitlab-cara-webhook-secret
type: Generic
-
kind: ImageStream
apiVersion: v1
metadata:
name: cara-router
-
kind: BuildConfig
apiVersion: v1
metadata:
name: cara-webservice
spec:
source:
git:
ref: ${GIT_BRANCH}
uri: ${GIT_REPO}
sourceSecret:
name: sshdeploykey
output:
to:
kind: ImageStreamTag
name: 'cara-webservice:latest'
strategy:
sourceStrategy:
from:
kind: ImageStreamTag
name: 'python:3.6'
namespace: openshift
type: Source
triggers:
- generic:
secretReference:
name: gitlab-cara-webhook-secret
type: Generic
-
kind: ImageStream
apiVersion: v1
metadata:
name: cara-webservice
-
apiVersion: v1
kind: DeploymentConfig
metadata:
name: auth-service
labels:
app: auth-service
spec:
replicas;: 1
replicas: 1
template:
metadata:
labels:
app: auth-service
deploymentconfig: auth-service
spec:
containers:
- name: auth-service
@ -165,11 +30,33 @@
ports:
- containerPort: 8080
protocol: TCP
- envFrom:
- configMapRef:
name: auth-service
- secretRef:
name: auth-service-secrets
envFrom:
- configMapRef:
name: auth-service
- secretRef:
name: auth-service-secrets
imagePullPolicy: Always
resources: {}
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
dnsPolicy: ClusterFirst
restartPolicy: Always
schedulerName: default-scheduler
securityContext: { }
terminationGracePeriodSeconds: 30
strategy:
activeDeadlineSeconds: 21600
resources: { }
rollingParams:
intervalSeconds: 1
maxSurge: 25%
maxUnavailable: 25%
timeoutSeconds: 600
updatePeriodSeconds: 1
type: Rolling
test: false
selector:
deploymentconfig: auth-service
triggers:
- type: ConfigChange
- type: ImageChange
@ -187,7 +74,7 @@
metadata:
name: cara-app
spec:
replicas;: 1
replicas: 1
template:
metadata:
labels:
@ -202,6 +89,28 @@
ports:
- containerPort: 8080
protocol: TCP
imagePullPolicy: Always
resources: {}
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
dnsPolicy: ClusterFirst
restartPolicy: Always
schedulerName: default-scheduler
securityContext: { }
terminationGracePeriodSeconds: 30
strategy:
activeDeadlineSeconds: 21600
resources: { }
rollingParams:
intervalSeconds: 1
maxSurge: 25%
maxUnavailable: 25%
timeoutSeconds: 600
updatePeriodSeconds: 1
type: Rolling
test: false
selector:
app: cara-app
triggers:
- type: ConfigChange
- type: ImageChange
@ -219,7 +128,7 @@
metadata:
name: cara-router
spec:
replicas;: 1
replicas: 1
template:
metadata:
labels:
@ -233,6 +142,28 @@
protocol: TCP
- containerPort: 8443
protocol: TCP
imagePullPolicy: Always
resources: {}
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
dnsPolicy: ClusterFirst
restartPolicy: Always
schedulerName: default-scheduler
securityContext: { }
terminationGracePeriodSeconds: 30
strategy:
activeDeadlineSeconds: 21600
resources: { }
rollingParams:
intervalSeconds: 1
maxSurge: 25%
maxUnavailable: 25%
timeoutSeconds: 600
updatePeriodSeconds: 1
type: Rolling
test: false
selector:
app: cara-router
triggers:
- type: ConfigChange
- type: ImageChange
@ -251,7 +182,7 @@
metadata:
name: cara-webservice
spec:
replicas;: 1
replicas: 1
template:
metadata:
labels:
@ -275,8 +206,45 @@
ports:
- containerPort: 8080
protocol: TCP
imagePullPolicy: Always
readinessProbe:
failureThreshold: 3
httpGet:
path: /calculator-cern
port: 8080
scheme: HTTP
initialDelaySeconds: 3
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
resources:
limits:
cpu: '3'
memory: 3Gi
requests:
cpu: '1'
memory: 1Gi
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
dnsPolicy: ClusterFirst
restartPolicy: Always
schedulerName: default-scheduler
securityContext: { }
terminationGracePeriodSeconds: 30
strategy:
activeDeadlineSeconds: 21600
resources: { }
rollingParams:
intervalSeconds: 1
maxSurge: 25%
maxUnavailable: 25%
timeoutSeconds: 600
updatePeriodSeconds: 1
type: Rolling
test: false
selector:
app: cara-webservice
triggers:
- type: ConfigChange
- type: ImageChange
imageChangeParams:
automatic: true
@ -293,7 +261,7 @@
metadata:
name: cara-calculator-open
spec:
replicas;: 1
replicas: 1
template:
metadata:
labels:
@ -310,6 +278,28 @@
ports:
- containerPort: 8080
protocol: TCP
imagePullPolicy: Always
resources: {}
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
dnsPolicy: ClusterFirst
restartPolicy: Always
schedulerName: default-scheduler
securityContext: { }
terminationGracePeriodSeconds: 30
strategy:
activeDeadlineSeconds: 21600
resources: { }
rollingParams:
intervalSeconds: 1
maxSurge: 25%
maxUnavailable: 25%
timeoutSeconds: 600
updatePeriodSeconds: 1
type: Rolling
test: false
selector:
app: cara-calculator-open
triggers:
- type: ConfigChange
- type: ImageChange
@ -327,9 +317,3 @@
- name: PROJECT_NAME
description: The name of this project, e.g. test-cara
required: true
- name: GIT_REPO
description: The GIT repo URL
value: 'ssh://git@gitlab.cern.ch:7999/cara/cara.git'
- name: GIT_BRANCH
description: The name of the GIT branch to use when building the app, e.g. `live/test-cara` in TEST, `master` in prod
required: true

View file

@ -0,0 +1,36 @@
---
kind: "Template"
apiVersion: "v1"
metadata:
name: "cara-imagestreams"
creationTimestamp: null
annotations:
description: "CARA imagestreams OpenShift template."
tags: "cara-imagestreams"
labels:
template: "cara-application"
objects:
-
kind: ImageStream
apiVersion: v1
metadata:
name: auth-service
spec:
lookupPolicy:
local: False
-
kind: ImageStream
apiVersion: v1
metadata:
name: cara-router
spec:
lookupPolicy:
local: False
-
kind: ImageStream
apiVersion: v1
metadata:
name: cara-webservice
spec:
lookupPolicy:
local: False

View file

@ -15,6 +15,8 @@
kind: Route
metadata:
name: cara-route
labels:
app: "cara-route"
spec:
host: ${HOST}
port:
@ -25,6 +27,8 @@
to:
kind: Service
name: cara-router
weight: 100
wildcardPolicy: None
parameters:
- name: HOST

View file

@ -25,6 +25,8 @@
targetPort: 8080
selector:
deploymentconfig: auth-service
sessionAffinity: 'None'
type: 'ClusterIP'
-
apiVersion: v1
kind: Service
@ -40,6 +42,8 @@
targetPort: 8080
selector:
deploymentconfig: cara-app
sessionAffinity: 'None'
type: 'ClusterIP'
-
apiVersion: v1
kind: Service
@ -53,12 +57,10 @@
port: 8080
protocol: TCP
targetPort: 8080
- name: 8443-tcp
port: 8443
protocol: TCP
targetPort: 8443
selector:
deploymentconfig: cara-router
sessionAffinity: 'None'
type: 'ClusterIP'
-
apiVersion: v1
kind: Service
@ -74,6 +76,8 @@
targetPort: 8080
selector:
deploymentconfig: cara-webservice
sessionAffinity: 'None'
type: 'ClusterIP'
-
apiVersion: v1
kind: Service
@ -89,3 +93,5 @@
targetPort: 8080
selector:
deploymentconfig: cara-calculator-open
sessionAffinity: 'None'
type: 'ClusterIP'

View file

@ -5,6 +5,7 @@ import asyncio
import concurrent.futures
import datetime
import base64
import functools
import html
import json
import os
@ -15,7 +16,9 @@ import uuid
import zlib
import jinja2
import loky
from tornado.web import Application, RequestHandler, StaticFileHandler
import tornado.log
from . import markdown_tools
from . import model_generator
@ -111,10 +114,18 @@ class ConcentrationModel(BaseRequestHandler):
base_url = self.request.protocol + "://" + self.request.host
report_generator: ReportGenerator = self.settings['report_generator']
report = report_generator.build_report(base_url, form)
if self.settings.get("debug", False):
dt = (datetime.datetime.now() - start)
print(f'Report response time {dt.seconds}.{dt.microseconds}s')
executor = loky.get_reusable_executor(
max_workers=self.settings['handler_worker_pool_size'],
timeout=300,
)
report_task = executor.submit(
report_generator.build_report, base_url, form,
executor_factory=functools.partial(
concurrent.futures.ThreadPoolExecutor,
self.settings['report_generation_parallelism'],
),
)
report: str = await asyncio.wrap_future(report_task)
self.finish(report)
@ -123,11 +134,18 @@ class StaticModel(BaseRequestHandler):
form = model_generator.FormData.from_dict(model_generator.baseline_raw_form_data())
base_url = self.request.protocol + "://" + self.request.host
report_generator: ReportGenerator = self.settings['report_generator']
report = report_generator.build_report(base_url, form)
executor = loky.get_reusable_executor(max_workers=self.settings['handler_worker_pool_size'])
report_task = executor.submit(
report_generator.build_report, base_url, form,
executor_factory=functools.partial(
concurrent.futures.ThreadPoolExecutor,
self.settings['report_generation_parallelism'],
),
)
report: str = await asyncio.wrap_future(report_task)
self.finish(report)
class LandingPage(BaseRequestHandler):
def get(self):
template = self.settings["template_environment"].get_template(
@ -222,6 +240,9 @@ def make_app(
template_environment.get_template('common_text.md.j2')
)
if debug:
tornado.log.enable_pretty_logging()
return Application(
urls,
debug=debug,
@ -233,4 +254,19 @@ def make_app(
# COOKIE_SECRET being undefined will result in no login information being
# presented to the user.
cookie_secret=os.environ.get('COOKIE_SECRET', '<undefined>'),
# Process parallelism controls. There is a balance between serving a single report
# requests quickly or serving multiple requests concurrently.
# The defaults are: handle one report at a time, and allow parallelism
# of that report generation. A value of ``None`` will result in the number of
# processes being determined based on the number of CPUs. For some deployments,
# such as on OpenShift this number does *not* reflect the real number of CPUs that
# can be used, and it is recommended to specify these values explicitly (through
# the environment variables).
handler_worker_pool_size=(
int(os.environ.get("HANDLER_WORKER_POOL_SIZE", 1)) or None
),
report_generation_parallelism=(
int(os.environ.get('REPORT_PARALLELISM', 0)) or None
),
)

View file

@ -4,15 +4,16 @@ import dataclasses
from datetime import datetime, timedelta
import io
import typing
import urllib
import zlib
import qrcode
import urllib
import loky
import jinja2
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import numpy as np
import qrcode
from cara import models
from ... import monte_carlo as mc
@ -32,17 +33,17 @@ def calculate_report_data(model: models.ExposureModel):
resolution = 600
t_start, t_end = model_start_end(model)
times = list(np.linspace(t_start, t_end, resolution))
concentrations = [np.mean(model.concentration_model.concentration(time))
times = np.linspace(t_start, t_end, resolution)
concentrations = [np.array(model.concentration_model.concentration(time)).mean()
for time in times]
highest_const = max(concentrations)
prob = np.mean(model.infection_probability())
er = np.mean(model.concentration_model.infected.emission_rate_when_present())
prob = np.array(model.infection_probability()).mean()
er = np.array(model.concentration_model.infected.emission_rate_when_present()).mean()
exposed_occupants = model.exposed.number
expected_new_cases = np.mean(model.expected_new_cases())
expected_new_cases = np.array(model.expected_new_cases()).mean()
return {
"times": times,
"times": list(times),
"concentrations": concentrations,
"highest_const": highest_const,
"prob_inf": prob,
@ -166,7 +167,7 @@ def non_zero_percentage(percentage: int) -> str:
elif percentage < 1:
return "{:0.2f}%".format(percentage)
else:
return "{:0.0f}%".format(percentage)
return "{:0.1f}%".format(percentage)
def manufacture_alternative_scenarios(form: FormData) -> typing.Dict[str, mc.ExposureModel]:
@ -175,11 +176,13 @@ def manufacture_alternative_scenarios(form: FormData) -> typing.Dict[str, mc.Exp
# Two special option cases - HEPA and/or FFP2 masks.
FFP2_being_worn = bool(form.mask_wearing_option == 'mask_on' and form.mask_type == 'FFP2')
if FFP2_being_worn and form.hepa_option:
scenarios['Base scenario with HEPA and FFP2 masks'] = form.build_mc_model()
elif FFP2_being_worn:
scenarios['Base scenario with FFP2 masks'] = form.build_mc_model()
elif form.hepa_option:
scenarios['Base scenario with HEPA filter'] = form.build_mc_model()
FFP2andHEPAalternative = dataclass_utils.replace(form, mask_type='Type I')
scenarios['Base scenario with HEPA filter and Type I masks'] = FFP2andHEPAalternative.build_mc_model()
if not FFP2_being_worn and form.hepa_option:
noHEPAalternative = dataclass_utils.replace(form, mask_type = 'FFP2')
noHEPAalternative = dataclass_utils.replace(noHEPAalternative, mask_wearing_option = 'mask_on')
noHEPAalternative = dataclass_utils.replace(noHEPAalternative, hepa_option=False)
scenarios['Base scenario without HEPA filter, with FFP2 masks'] = noHEPAalternative.build_mc_model()
# The remaining scenarios are based on Type I masks (possibly not worn)
# and no HEPA filtration.
@ -191,11 +194,11 @@ def manufacture_alternative_scenarios(form: FormData) -> typing.Dict[str, mc.Exp
without_mask = dataclass_utils.replace(form, mask_wearing_option='mask_off')
if form.ventilation_type == 'mechanical_ventilation':
scenarios['Mechanical ventilation with Type I masks'] = with_mask.build_mc_model()
#scenarios['Mechanical ventilation with Type I masks'] = with_mask.build_mc_model()
scenarios['Mechanical ventilation without masks'] = without_mask.build_mc_model()
elif form.ventilation_type == 'natural_ventilation':
scenarios['Windows open with Type I masks'] = with_mask.build_mc_model()
#scenarios['Windows open with Type I masks'] = with_mask.build_mc_model()
scenarios['Windows open without masks'] = without_mask.build_mc_model()
# No matter the ventilation scheme, we include scenarios which don't have any ventilation.
@ -251,15 +254,20 @@ def scenario_statistics(mc_model: mc.ExposureModel, sample_times: np.ndarray):
}
def comparison_report(scenarios: typing.Dict[str, mc.ExposureModel], sample_times: np.ndarray):
def comparison_report(
scenarios: typing.Dict[str, mc.ExposureModel],
sample_times: np.ndarray,
executor_factory: typing.Callable[[], concurrent.futures.Executor],
):
statistics = {}
with concurrent.futures.ProcessPoolExecutor() as executor:
with executor_factory() as executor:
results = executor.map(
scenario_statistics,
scenarios.values(),
[sample_times] * len(scenarios),
timeout=60,
)
for (name, model), model_stats in zip(scenarios.items(), results):
statistics[name] = model_stats
return {
@ -273,12 +281,23 @@ class ReportGenerator:
jinja_loader: jinja2.BaseLoader
calculator_prefix: str
def build_report(self, base_url: str, form: FormData) -> str:
def build_report(
self,
base_url: str,
form: FormData,
executor_factory: typing.Callable[[], concurrent.futures.Executor],
) -> str:
model = form.build_model()
context = self.prepare_context(base_url, model, form)
context = self.prepare_context(base_url, model, form, executor_factory=executor_factory)
return self.render(context)
def prepare_context(self, base_url: str, model: models.ExposureModel, form: FormData) -> dict:
def prepare_context(
self,
base_url: str,
model: models.ExposureModel,
form: FormData,
executor_factory: typing.Callable[[], concurrent.futures.Executor],
) -> dict:
now = datetime.utcnow().astimezone()
time = now.strftime("%Y-%m-%d %H:%M:%S UTC")
@ -293,7 +312,9 @@ class ReportGenerator:
context.update(calculate_report_data(model))
alternative_scenarios = manufacture_alternative_scenarios(form)
context['alternative_scenarios'] = comparison_report(alternative_scenarios, scenario_sample_times)
context['alternative_scenarios'] = comparison_report(
alternative_scenarios, scenario_sample_times, executor_factory=executor_factory,
)
context['qr_code'] = generate_qr_code(base_url, self.calculator_prefix, form)
context['calculator_prefix'] = self.calculator_prefix
context['scale_warning'] = {

View file

@ -349,7 +349,6 @@
{% if form.mask_wearing_option == "mask_on" %}
<li><p class="data_text">Mask type: {{ form.mask_type }}</p></li>
{% endif %}
</ul>
{% endblock simulation_overview %}

View file

@ -1,8 +1,10 @@
import concurrent.futures
from functools import partial
import time
import pytest
from cara.apps.calculator import report_generator
from cara.apps.calculator.report_generator import ReportGenerator, readable_minutes
from cara.apps.calculator import make_app
@ -15,8 +17,10 @@ def test_generate_report(baseline_form):
start = time.perf_counter()
generator: report_generator.ReportGenerator = make_app().settings['report_generator']
report = generator.build_report("", baseline_form)
generator: ReportGenerator = make_app().settings['report_generator']
report = generator.build_report("", baseline_form, partial(
concurrent.futures.ThreadPoolExecutor, 1,
))
end = time.perf_counter()
assert report != ""
assert end - start < time_limit
@ -33,4 +37,4 @@ def test_generate_report(baseline_form):
],
)
def test_readable_minutes(test_input, expected):
assert report_generator.readable_minutes(test_input) == expected
assert readable_minutes(test_input) == expected

View file

@ -63,9 +63,7 @@ class TestBasicApp(tornado.testing.AsyncHTTPTestCase):
# but the end time is after the other request (because it takes longer
# to process a report than a simple page).
assert response.start_time < other_response.start_time
# Known fail after reverting in https://gitlab.cern.ch/cara/cara/-/merge_requests/219.
with pytest.raises(AssertionError):
assert end_time(response) > end_time(other_response)
assert end_time(response) > end_time(other_response)
self.assertEqual(response.code, 200)
assert 'CERN HSE' not in response.body.decode()

View file

@ -1,78 +1,85 @@
# Created by installing the cara[app] extra and running:
# echo '.[app]' > requirements.txt
# pip freeze | grep -v cara >> requirements.txt
# echo '.[app]' > requirements.txt
# pip list --format freeze | grep -vi cara | grep -v pip | grep -v setuptools >> requirements.txt
.[app]
anyio==2.1.0
appnope==0.1.2
anyio==3.3.0
argon2-cffi==20.1.0
async-generator==1.10
attrs==20.3.0
attrs==21.2.0
backcall==0.2.0
bleach==3.3.0
certifi==2020.12.5
cffi==1.14.5
contextvars==2.4
bleach==3.3.1
certifi==2021.5.30
cffi==1.14.6
charset-normalizer==2.0.3
cloudpickle==1.6.0
cycler==0.10.0
dataclasses==0.8
decorator==4.4.2
defusedxml==0.6.0
debugpy==1.4.1
decorator==5.0.9
defusedxml==0.7.1
entrypoints==0.3
idna==3.1
immutables==0.15
importlib-metadata==3.5.0
ipykernel==5.5.0
ipympl==0.6.3
ipython==7.16.1
idna==3.2
ipykernel==6.0.3
ipympl==0.7.0
ipython==7.25.0
ipython-genutils==0.2.0
ipywidgets==7.6.3
jedi==0.18.0
Jinja2==2.11.3
Jinja2==3.0.1
joblib==1.0.1
jsonschema==3.2.0
jupyter-client==6.1.11
jupyter-client==6.1.12
jupyter-core==4.7.1
jupyter-server==1.4.1
jupyter-server==1.10.1
jupyterlab-pygments==0.1.2
jupyterlab-widgets==1.0.0
kiwisolver==1.3.1
MarkupSafe==1.1.1
matplotlib==3.3.4
loky==2.9.0
MarkupSafe==2.0.1
matplotlib==3.4.2
matplotlib-inline==0.1.2
memoization==0.3.2
mistune==0.8.4
nbclient==0.5.2
nbconvert==6.0.7
nbformat==5.1.2
nbclient==0.5.3
nbconvert==6.1.0
nbformat==5.1.3
nest-asyncio==1.5.1
notebook==6.2.0
numpy==1.19.5
packaging==20.9
notebook==6.4.0
numpy==1.21.1
packaging==21.0
pandocfilters==1.4.3
parso==0.8.1
parso==0.8.2
pexpect==4.8.0
pickleshare==0.7.5
Pillow==8.1.0
prometheus-client==0.9.0
prompt-toolkit==3.0.16
Pillow==8.3.1
prometheus-client==0.11.0
prompt-toolkit==3.0.19
psutil==5.8.0
ptyprocess==0.7.0
pycparser==2.20
Pygments==2.8.0
Pygments==2.9.0
pyparsing==2.4.7
pyrsistent==0.17.3
python-dateutil==2.8.1
pyzmq==22.0.3
qrcode==6.1
scipy==1.5.4
scikit_learn==0.23.1
Send2Trash==1.5.0
six==1.15.0
pyrsistent==0.18.0
python-dateutil==2.8.2
pyzmq==22.1.0
qrcode==7.2
requests==2.26.0
requests-unixsocket==0.2.0
scikit-learn==0.24.2
scipy==1.7.0
Send2Trash==1.7.1
six==1.16.0
sklearn==0.0
sniffio==1.2.0
terminado==0.9.2
testpath==0.4.4
terminado==0.10.1
testpath==0.5.0
threadpoolctl==2.2.0
tornado==6.1
traitlets==4.3.3
typing-extensions==3.7.4.3
voila==0.2.7
traitlets==5.0.5
urllib3==1.26.6
voila==0.2.10
wcwidth==0.2.5
webencodings==0.5.1
websocket-client==1.1.0
wheel==0.36.2
widgetsnbextension==3.5.1
zipp==3.4.0

View file

@ -5,7 +5,7 @@ addopts = --mypy
[mypy]
no_warn_no_return = True
[mypy-matplotlib.*]
[mypy-loky.*]
ignore_missing_imports = True
[mypy-ipympl.*]
@ -14,6 +14,9 @@ ignore_missing_imports = True
[mypy-ipywidgets.*]
ignore_missing_imports = True
[mypy-matplotlib.*]
ignore_missing_imports = True
[mypy-mistune.*]
ignore_missing_imports = True

View file

@ -23,10 +23,12 @@ REQUIREMENTS: dict = {
'ipympl',
'ipywidgets',
'Jinja2',
'loky',
'matplotlib',
'memoization',
'mistune',
'numpy',
'psutil',
'qrcode[pil]',
'scipy',
'sklearn',
@ -59,7 +61,7 @@ setup(
url='cern.ch/cara',
packages=find_packages(),
python_requires='~=3.6',
python_requires='~=3.9',
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",