Merge branch 'feature/schema_updates' into 'master'

Data Service - Schema Update

See merge request caimira/caimira!482
This commit is contained in:
Luis Aleixo 2024-02-14 14:49:19 +01:00
commit 0590a27fdc
10 changed files with 49 additions and 219 deletions

View file

@ -352,16 +352,7 @@ $ oc create secret generic \
The CERN data service collects data from various sources and exposes them via a REST API endpoint.
Create secret:
```console
$ read DATA_SERVICE_CLIENT_EMAIL
$ read DATA_SERVICE_CLIENT_PASSWORD
$ oc create secret generic \
--from-literal="DATA_SERVICE_CLIENT_EMAIL=$DATA_SERVICE_CLIENT_EMAIL" \
--from-literal="DATA_SERVICE_CLIENT_PASSWORD=$DATA_SERVICE_CLIENT_PASSWORD" \
data-service-api
```
To enable the service set the environment variable `DATA_SERVICE_ENABLED` as `True`.
## Update configuration

View file

@ -24,8 +24,6 @@ if [[ "$APP_NAME" == "calculator-app" ]]; then
export "EXTRA_PAGES"="$EXTRA_PAGES"
export "DATA_SERVICE_ENABLED"="${DATA_SERVICE_ENABLED:=False}"
export "DATA_SERVICE_CLIENT_EMAIL"="$DATA_SERVICE_CLIENT_EMAIL"
export "DATA_SERVICE_CLIENT_PASSWORD"="$DATA_SERVICE_CLIENT_PASSWORD"
echo "Starting the caimira webservice with: python -m caimira.apps.calculator ${args[@]}"
python -m caimira.apps.calculator "${args[@]}"

View file

@ -285,16 +285,6 @@
name: arve-api
- name: DATA_SERVICE_ENABLED
value: 'False'
- name: DATA_SERVICE_CLIENT_EMAIL
valueFrom:
secretKeyRef:
key: DATA_SERVICE_CLIENT_EMAIL
name: data-service-api
- name: DATA_SERVICE_CLIENT_PASSWORD
valueFrom:
secretKeyRef:
key: DATA_SERVICE_CLIENT_PASSWORD
name: data-service-api
image: '${PROJECT_NAME}/calculator-app'
ports:
- containerPort: 8080

View file

@ -494,12 +494,7 @@ def make_app(
data_service = None
data_service_enabled = os.environ.get("DATA_SERVICE_ENABLED", "False")
is_enabled = data_service_enabled.lower() == "true"
if is_enabled:
credentials = {
"email": os.environ.get("DATA_SERVICE_CLIENT_EMAIL", None),
"password": os.environ.get("DATA_SERVICE_CLIENT_PASSWORD", None),
}
data_service = DataService.create(credentials)
if is_enabled: data_service = DataService.create()
return Application(
urls,

View file

@ -1,5 +1,3 @@
import typing
# ------------------ Default form values ----------------------
# Used to declare when an attribute of a class must have a value provided, and

View file

@ -411,7 +411,6 @@ class VirusFormData(FormData):
activity_defn = self.data_registry.population_scenario_activity[self.activity_type]['activity']
expiration_defn = self.data_registry.population_scenario_activity[self.activity_type]['expiration']
if (self.activity_type == 'smallmeeting'):
# In a conversation of N people, each person speaks approximately 1/N of the time.
expiration_defn = {'Speaking': 1, 'Breathing': self.total_people - 1}

View file

@ -60,23 +60,6 @@ def custom_distribution_lookup(dict: dict, key_part: str) -> typing.Any:
return f"Key '{key_part}' not found."
def evaluate_reference(reference_variable: str) -> typing.Any:
"""
Evaluate a reference variable.
Args:
reference_variable (str): The variable to evaluate.
Returns:
Any: The evaluated value or an error message if the variable is not defined.
"""
try:
return eval(reference_variable)
except NameError:
return f"Variable '{reference_variable}' is not defined."
def evaluate_custom_distribution(dist: str, params: typing.Dict) -> typing.Any:
"""
Evaluate a custom distribution.
@ -92,13 +75,13 @@ def evaluate_custom_distribution(dist: str, params: typing.Dict) -> typing.Any:
ValueError: If the distribution type is not recognized.
"""
if dist == 'Numpy Linear Space (linspace)':
if dist == 'Linear Space':
return np.linspace(params['start'], params['stop'], params['num'])
elif dist == 'Numpy Normal Distribution (random.normal)':
return Normal(params['mean_gaussian'], params['standard_deviation_gaussian'])
elif dist == 'Numpy Log-normal Distribution (random.lognormal)':
return LogNormal(params['mean_gaussian'], params['standard_deviation_gaussian'])
elif dist == 'Numpy Uniform Distribution (random.uniform)':
elif dist == 'Normal':
return Normal(params['normal_mean_gaussian'], params['normal_standard_deviation_gaussian'])
elif dist == 'Log-normal':
return LogNormal(params['lognormal_mean_gaussian'], params['lognormal_standard_deviation_gaussian'])
elif dist == 'Uniform':
return Uniform(params['low'], params['high'])
else:
raise ValueError('Bad request - distribution not found.')

View file

@ -26,81 +26,81 @@ class DataRegistry:
activity_distributions = {
"Seated": {
"inhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": -0.6872121723362303,
"standard_deviation_gaussian": 0.10498338229297108,
"lognormal_mean_gaussian": -0.6872121723362303,
"lognormal_standard_deviation_gaussian": 0.10498338229297108,
},
},
"exhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": -0.6872121723362303,
"standard_deviation_gaussian": 0.10498338229297108,
"lognormal_mean_gaussian": -0.6872121723362303,
"lognormal_standard_deviation_gaussian": 0.10498338229297108,
},
},
},
"Standing": {
"inhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": -0.5742377578494785,
"standard_deviation_gaussian": 0.09373162411398223,
"lognormal_mean_gaussian": -0.5742377578494785,
"lognormal_standard_deviation_gaussian": 0.09373162411398223,
},
},
"exhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": -0.5742377578494785,
"standard_deviation_gaussian": 0.09373162411398223,
"lognormal_mean_gaussian": -0.5742377578494785,
"lognormal_standard_deviation_gaussian": 0.09373162411398223,
},
},
},
"Light activity": {
"inhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": 0.21380242785625422,
"standard_deviation_gaussian": 0.09435378091059601,
"lognormal_mean_gaussian": 0.21380242785625422,
"lognormal_standard_deviation_gaussian": 0.09435378091059601,
},
},
"exhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": 0.21380242785625422,
"standard_deviation_gaussian": 0.09435378091059601,
"lognormal_mean_gaussian": 0.21380242785625422,
"lognormal_standard_deviation_gaussian": 0.09435378091059601,
},
},
},
"Moderate activity": {
"inhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": 0.551771330362601,
"standard_deviation_gaussian": 0.1894616357138137,
"lognormal_mean_gaussian": 0.551771330362601,
"lognormal_standard_deviation_gaussian": 0.1894616357138137,
},
},
"exhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": 0.551771330362601,
"standard_deviation_gaussian": 0.1894616357138137,
"lognormal_mean_gaussian": 0.551771330362601,
"lognormal_standard_deviation_gaussian": 0.1894616357138137,
},
},
},
"Heavy exercise": {
"inhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": 1.1644665696723049,
"standard_deviation_gaussian": 0.21744554768657565,
"lognormal_mean_gaussian": 1.1644665696723049,
"lognormal_standard_deviation_gaussian": 0.21744554768657565,
},
},
"exhalation_rate": {
"associated_distribution": "Numpy Log-normal Distribution (random.lognormal)",
"associated_distribution": "Log-normal",
"parameters": {
"mean_gaussian": 1.1644665696723049,
"standard_deviation_gaussian": 0.21744554768657565,
"lognormal_mean_gaussian": 1.1644665696723049,
"lognormal_standard_deviation_gaussian": 0.21744554768657565,
},
},
},
@ -262,7 +262,7 @@ class DataRegistry:
mask_distributions = {
"Type I": {
"η_inhale": {
"associated_distribution": "Numpy Uniform Distribution (random.uniform)",
"associated_distribution": "Uniform",
"parameters": {
"low": 0.25,
"high": 0.80,
@ -273,7 +273,7 @@ class DataRegistry:
},
"FFP2": {
"η_inhale": {
"associated_distribution": "Numpy Uniform Distribution (random.uniform)",
"associated_distribution": "Uniform",
"parameters": {
"low": 0.83,
"high": 0.91,
@ -284,7 +284,7 @@ class DataRegistry:
},
"Cloth": {
"η_inhale": {
"associated_distribution": "Numpy Uniform Distribution (random.uniform)",
"associated_distribution": "Uniform",
"parameters": {
"low": 0.05,
"high": 0.40,
@ -292,7 +292,7 @@ class DataRegistry:
},
"Known filtration efficiency of masks when exhaling?": "Yes",
"η_exhale": {
"associated_distribution": "Numpy Uniform Distribution (random.uniform)",
"associated_distribution": "Uniform",
"parameters": {
"low": 0.20,
"high": 0.50,
@ -392,7 +392,7 @@ class DataRegistry:
"office": {"activity": "Seated", "expiration": {"Speaking": 1, "Breathing": 2}},
"smallmeeting": {
"activity": "Seated",
"expiration": {"Speaking": 1, "Breathing": None},
"expiration": {"Speaking": 1},
},
"largemeeting": {
"activity": "Standing",

View file

@ -1,8 +1,5 @@
import logging
import typing
from datetime import datetime, timedelta, timezone
import jwt
import requests
from caimira.store.data_registry import DataRegistry
@ -18,78 +15,18 @@ class DataService:
def __init__(
self,
credentials: typing.Dict[str, typing.Optional[str]],
host: str,
):
self._credentials = credentials
self._host = host
@classmethod
def create(cls, credentials: typing.Dict[str, typing.Optional[str]], host: str = "https://caimira-data-api.app.cern.ch"):
def create(cls, host: str = "https://caimira-data-api.app.cern.ch"):
"""Factory."""
return cls(credentials, host)
def _is_valid(self, access_token):
"""Return True if the expiration token is still valid."""
try:
decoded = jwt.decode(
access_token, algorithms=["HS256"], options={"verify_signature": False}
)
expiration_timestamp = decoded["exp"]
expiration = datetime.utcfromtimestamp(expiration_timestamp).replace(
tzinfo=timezone.utc
)
now = datetime.now(timezone.utc)
is_valid = now < expiration - timedelta(
seconds=5
) # 5 seconds time delta to avoid timing issues
logger.debug(f"Access token expiration: {expiration_timestamp}. Is valid? {is_valid}")
return is_valid
except jwt.ExpiredSignatureError:
logger.warning("JWT token expired.")
except jwt.InvalidTokenError:
logger.warning("JWT token invalid.")
return False
def _login(self):
logger.debug(f"Access token: {self._access_token}")
if self._access_token and self._is_valid(self._access_token):
return self._access_token
# invalid access_token, fetch it again
client_email = self._credentials["email"]
client_password = self._credentials["password"]
if client_email == None or client_password == None:
# If the credentials are not defined, an exception is raised.
raise Exception("DataService credentials not set")
url = f"{self._host}/login"
headers = {"Content-Type": "application/json"}
json_body = dict(email=client_email, password=client_password)
try:
response = requests.post(url, json=json_body, headers=headers)
response.raise_for_status()
if response.status_code == 200:
self._access_token = response.json()["access_token"]
logger.debug(f"Obtained new access token: {self._access_token}")
return self._access_token
else:
logger.error(
f"Unexpected error on login. Response status code: {response.status_code}, body: f{response.text}"
)
except requests.exceptions.RequestException as e:
logger.exception(e)
return cls(host)
def _fetch(self):
access_token = self._login()
headers = {
"Authorization": f"Bearer {access_token}",
"Content-Type": "application/json",
}
url = f"{self._host}/data"

View file

@ -1,78 +1,20 @@
import time
import unittest
from unittest.mock import Mock, patch
import jwt
from caimira.store.data_service import DataService
class DataServiceTests(unittest.TestCase):
def setUp(self):
# Set up any necessary test data or configurations
self.credentials = {"email": "test@example.com", "password": "password123"}
self.data_service = DataService.create(self.credentials, host="https://dataservice.example.com")
def test_jwt_expiration(self):
is_valid = self.data_service._is_valid(None)
self.assertFalse(is_valid)
now = time.time()
encoded = jwt.encode({"exp": now - 10}, "very secret", algorithm="HS256")
is_valid = self.data_service._is_valid(encoded)
self.assertFalse(is_valid)
encoded = jwt.encode({"exp": now}, "very secret", algorithm="HS256")
is_valid = self.data_service._is_valid(encoded)
self.assertFalse(is_valid)
encoded = jwt.encode({"exp": now + 10}, "very secret", algorithm="HS256")
is_valid = self.data_service._is_valid(encoded)
self.assertTrue(is_valid)
@patch("requests.post")
def test_login_successful(self, mock_post):
# Mock successful login response
mock_response = Mock()
mock_response.status_code = 200
mock_response.json.return_value = {"access_token": "dummy_token"}
mock_post.return_value = mock_response
# Call the login method
access_token = self.data_service._login()
# Assert that the access token is returned correctly
self.assertEqual(access_token, "dummy_token")
# Verify that the fetch method was called with the expected arguments
mock_post.assert_called_once_with(
"https://dataservice.example.com/login",
json=dict(email="test@example.com", password="password123"),
headers={"Content-Type": "application/json"},
)
@patch("requests.post")
def test_login_error(self, mock_post):
# Mock login error response
mock_post.return_value = Mock()
mock_post.return_value.status_code = 500
# Call the login method
access_token = self.data_service._login()
# Assert that the login method returns None in case of an error
self.assertIsNone(access_token)
self.data_service = DataService.create(host="https://dataservice.example.com")
@patch("requests.get")
@patch.object(DataService, "_login")
def test_fetch_successful(self, mock_login, mock_get):
def test_fetch_successful(self, mock_get):
# Mock successful fetch response
mock_get.return_value = Mock()
mock_get.return_value.status_code = 200
mock_get.return_value.json.return_value = {"data": "dummy_data"}
# Call the fetch method with a mock access token
mock_login.return_value = "dummy_token"
data = self.data_service._fetch()
# Assert that the data is returned correctly
@ -82,20 +24,17 @@ class DataServiceTests(unittest.TestCase):
mock_get.assert_called_once_with(
"https://dataservice.example.com/data",
headers={
"Authorization": "Bearer dummy_token",
"Content-Type": "application/json",
},
)
@patch("requests.get")
@patch.object(DataService, "_login")
def test_fetch_error(self, mock_login, mock_get):
def test_fetch_error(self, mock_get):
# Mock fetch error response
mock_get.return_value = Mock()
mock_get.return_value.status_code = 500
# Call the fetch method with a mock access token
mock_login.return_value = "dummy_token"
# Call the fetch method
data = self.data_service._fetch()
# Assert that the fetch method returns None in case of an error