Commit c7836e82 authored by David Mendez

Merge branch 'staging' into 'master'

Redeploy after k8s meltdown

See merge request !2
parents 0cff5266 142d201d
Pipeline #110409 failed with stages in 11 minutes and 37 seconds
@@ -127,14 +127,19 @@ dmypy.json
# Pycharm project files
.idea/**
# Jobs run dir and output
jobs_run/**
jobs_output/**
app/static/
# tasks run dir and output
tasks_run/
# Configuration
.env
/config.yml
/config_playground.yml
output.csv
\ No newline at end of file
output.csv
# Configuration
test.db
# Temp path
/tmp
\ No newline at end of file
@@ -35,11 +35,8 @@ stages:
- prepare_config_deployment
- deploy_to_staging
- test_staging
- deploy_to_prod_hx
- test_prod_hx
- deploy_to_prod_hh
- test_prod_hh
- deploy_to_prod_hx_hh
- test_prod_hx_hh
# ----------------------------------------------------------------------------------------------------------------------
# build
@@ -171,8 +168,8 @@ deploy_to_staging:
- kubectl config use-context ${CHEMBL_NS_STAGING}
- echo ${CHEMBL_NS_STAGING} ${KUBE_URL_HX} ${KUBE_CLUS_NAME_HX} ${KUBE_USER_HX}
- kubectl get pods -n ${CHEMBL_NS_STAGING}
- kubectl create secret generic ${RUN_CONFIG_SECRET_NAME_STAGING} --from-file=RUN_CONFIG.yml=${CONFIG_STAGING_FILE_PATH} -o yaml -n ${CHEMBL_NS_STAGING} --dry-run=client | kubectl replace -f -
- kubectl create secret generic ${GUNICORN_CONFIG_SECRET_NAME_STAGING} --from-file=GUNICORN_CONFIG.py=${CONFIG_GUNICORN_STAGING_FILE_PATH} -o yaml -n ${CHEMBL_NS_STAGING} --dry-run=client | kubectl replace -f -
- kubectl create secret generic ${RUN_CONFIG_SECRET_NAME_STAGING} --from-file=RUN_CONFIG.yml=${CONFIG_STAGING_FILE_PATH} -o yaml -n ${CHEMBL_NS_STAGING} --dry-run=client | kubectl apply -f -
- kubectl create secret generic ${GUNICORN_CONFIG_SECRET_NAME_STAGING} --from-file=GUNICORN_CONFIG.py=${CONFIG_GUNICORN_STAGING_FILE_PATH} -o yaml -n ${CHEMBL_NS_STAGING} --dry-run=client | kubectl apply -f -
- cat ${CONFIG_STAGING_FILE_PATH}
- cat ${CONFIG_GUNICORN_STAGING_FILE_PATH}
- sed -i "s~<NAMESPACE>~${CHEMBL_NS_STAGING}~" k8s-deployment.yaml
@@ -186,6 +183,7 @@ deploy_to_staging:
- sed -i "s~<TASKS_DAEMON_IMAGE_TAG>~${TASKS_DAEMON_IMAGE_TAG}~" k8s-deployment.yaml
- sed -i "s~<RUN_CONFIG_SECRET_NAME>~${RUN_CONFIG_SECRET_NAME_STAGING}~" k8s-deployment.yaml
- sed -i "s~<GUNICORN_CONFIG_SECRET_NAME>~${GUNICORN_CONFIG_SECRET_NAME_STAGING}~" k8s-deployment.yaml
- sed -i "s~<NODE_PORT>~${ES_SUBSET_GENERATOR_NODE_PORT_STAGING}~" k8s-deployment.yaml
- cat k8s-deployment.yaml
- kubectl apply -n ${CHEMBL_NS_STAGING} -f k8s-deployment.yaml
- kubectl get pods -n ${CHEMBL_NS_STAGING}
@@ -195,27 +193,25 @@ deploy_to_staging:
## ----------------------------------------------------------------------------------------------------------------------
## Test staging
## ----------------------------------------------------------------------------------------------------------------------
functional_tests:
functional_tests_staging:
stage: test_staging
image:
name: $SERVER_IMAGE_TAG
entrypoint: [""]
only:
- staging
except:
- schedules
environment:
name: staging
url: ${SERVER_URL_STAGING}
script:
- set -x
- functional_tests/run_functional_tests.py ${SERVER_URL_STAGING}
- functional_tests/run_functional_tests.py ${ES_SUBSET_GENERATOR_URL_STAGING} ${ES_SUBSET_GENERATOR_STAGING_ADMIN_USERNAME} ${ES_SUBSET_GENERATOR_STAGING_ADMIN_PASSWORD} ${ES_PROXY_API_URL_STAGING}
## ---------------------------------------------------------------------------------------------------------------------
## Deployment to prod hx
## ---------------------------------------------------------------------------------------------------------------------
deploy_to_prod_hx:
stage: deploy_to_prod_hx
stage: deploy_to_prod_hx_hh
image:
name: lachlanevenson/k8s-kubectl:latest
entrypoint: ["/bin/sh", "-c"]
@@ -234,8 +230,8 @@ deploy_to_prod_hx:
- kubectl config use-context ${CHEMBL_NS_PROD}
- echo ${CHEMBL_NS_PROD} ${KUBE_URL_HX} ${KUBE_CLUS_NAME_HX} ${KUBE_USER_HX}
- kubectl get pods -n ${CHEMBL_NS_PROD}
- kubectl create secret generic ${RUN_CONFIG_SECRET_NAME_PROD_HX} --from-file=RUN_CONFIG.yml=${CONFIG_PROD_HX_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl replace -f -
- kubectl create secret generic ${GUNICORN_CONFIG_SECRET_NAME_PROD_HX} --from-file=GUNICORN_CONFIG.py=${CONFIG_GUNICORN_PROD_HX_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl replace -f -
- kubectl create secret generic ${RUN_CONFIG_SECRET_NAME_PROD_HX} --from-file=RUN_CONFIG.yml=${CONFIG_PROD_HX_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl apply -f -
- kubectl create secret generic ${GUNICORN_CONFIG_SECRET_NAME_PROD_HX} --from-file=GUNICORN_CONFIG.py=${CONFIG_GUNICORN_PROD_HX_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl apply -f -
- cat ${CONFIG_PROD_HX_FILE_PATH}
- cat ${CONFIG_GUNICORN_PROD_HX_FILE_PATH}
- sed -i "s~<NAMESPACE>~${CHEMBL_NS_PROD}~" k8s-deployment.yaml
@@ -249,6 +245,7 @@ deploy_to_prod_hx:
- sed -i "s~<TASKS_DAEMON_IMAGE_TAG>~${TASKS_DAEMON_IMAGE_TAG}~" k8s-deployment.yaml
- sed -i "s~<RUN_CONFIG_SECRET_NAME>~${RUN_CONFIG_SECRET_NAME_PROD_HX}~" k8s-deployment.yaml
- sed -i "s~<GUNICORN_CONFIG_SECRET_NAME>~${GUNICORN_CONFIG_SECRET_NAME_PROD_HX}~" k8s-deployment.yaml
- sed -i "s~<NODE_PORT>~${ES_SUBSET_GENERATOR_NODE_PORT_PROD_HX}~" k8s-deployment.yaml
- cat k8s-deployment.yaml
- kubectl apply -n ${CHEMBL_NS_PROD} -f k8s-deployment.yaml
- kubectl get pods -n ${CHEMBL_NS_PROD}
@@ -256,7 +253,7 @@ deploy_to_prod_hx:
- kubectl apply -n ${CHEMBL_NS_PROD} -f ${AUTOSCALER_PROD_HX_FILE_PATH}
functional_tests_prod_hx:
stage: test_prod_hx
stage: test_prod_hx_hh
image:
name: $SERVER_IMAGE_TAG
entrypoint: [""]
@@ -267,13 +264,13 @@ functional_tests_prod_hx:
script:
- set -x
- sleep 30 # Give some time for the deployment to take effect.
- functional_tests/run_functional_tests.py ${SERVER_URL_PROD_HX}
- functional_tests/run_functional_tests.py ${ES_SUBSET_GENERATOR_URL_PROD_HX} ${ES_SUBSET_GENERATOR_PROD_HX_ADMIN_USERNAME} ${ES_SUBSET_GENERATOR_PROD_HX_ADMIN_PASSWORD} ${ES_PROXY_API_URL_PROD_HX}
## ----------------------------------------------------------------------------------------------------------------------
## Deployment to prod hh
## ----------------------------------------------------------------------------------------------------------------------
deploy_to_prod_hh:
stage: deploy_to_prod_hh
stage: deploy_to_prod_hx_hh
image:
name: lachlanevenson/k8s-kubectl:latest
entrypoint: ["/bin/sh", "-c"]
@@ -292,8 +289,8 @@ deploy_to_prod_hh:
- kubectl config use-context ${CHEMBL_NS_PROD}
- echo ${CHEMBL_NS_PROD} ${KUBE_URL_HH} ${KUBE_CLUS_NAME_HH} ${KUBE_USER_HH}
- kubectl get pods -n ${CHEMBL_NS_PROD}
- kubectl create secret generic ${RUN_CONFIG_SECRET_NAME_PROD_HH} --from-file=RUN_CONFIG.yml=${CONFIG_PROD_HH_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl replace -f -
- kubectl create secret generic ${GUNICORN_CONFIG_SECRET_NAME_PROD_HH} --from-file=GUNICORN_CONFIG.py=${CONFIG_GUNICORN_PROD_HH_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl replace -f -
- kubectl create secret generic ${RUN_CONFIG_SECRET_NAME_PROD_HH} --from-file=RUN_CONFIG.yml=${CONFIG_PROD_HH_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl apply -f -
- kubectl create secret generic ${GUNICORN_CONFIG_SECRET_NAME_PROD_HH} --from-file=GUNICORN_CONFIG.py=${CONFIG_GUNICORN_PROD_HH_FILE_PATH} -o yaml -n ${CHEMBL_NS_PROD} --dry-run=client | kubectl apply -f -
- cat ${CONFIG_PROD_HH_FILE_PATH}
- cat ${CONFIG_GUNICORN_PROD_HH_FILE_PATH}
- sed -i "s~<NAMESPACE>~${CHEMBL_NS_PROD}~" k8s-deployment.yaml
@@ -307,6 +304,7 @@ deploy_to_prod_hh:
- sed -i "s~<CPU_REQUESTED>~${CPU_REQUESTED_PROD_HH}~" k8s-deployment.yaml
- sed -i "s~<RUN_CONFIG_SECRET_NAME>~${RUN_CONFIG_SECRET_NAME_PROD_HH}~" k8s-deployment.yaml
- sed -i "s~<GUNICORN_CONFIG_SECRET_NAME>~${GUNICORN_CONFIG_SECRET_NAME_PROD_HH}~" k8s-deployment.yaml
- sed -i "s~<NODE_PORT>~${ES_PROXY_API_URL_PROD_HH}~" k8s-deployment.yaml
- cat k8s-deployment.yaml
- kubectl apply -n ${CHEMBL_NS_PROD} -f k8s-deployment.yaml
- kubectl get pods -n ${CHEMBL_NS_PROD}
@@ -314,7 +312,7 @@ deploy_to_prod_hh:
- kubectl apply -n ${CHEMBL_NS_PROD} -f ${AUTOSCALER_PROD_HH_FILE_PATH}
functional_tests_prod_hh:
stage: test_prod_hh
stage: test_prod_hx_hh
image:
name: $SERVER_IMAGE_TAG
entrypoint: [""]
@@ -323,5 +321,5 @@ functional_tests_prod_hh:
script:
- set -x
- sleep 30 # Give some time for the deployment to take effect.
- functional_tests/run_functional_tests.py ${SERVER_URL_PROD_HH}
- functional_tests/run_functional_tests.py ${ES_SUBSET_GENERATOR_URL_PROD_HH} ${ES_SUBSET_GENERATOR_PROD_HH_ADMIN_USERNAME} ${ES_SUBSET_GENERATOR_PROD_HH_ADMIN_PASSWORD} ${ES_PROXY_API_URL_PROD_HH}
@@ -9,7 +9,11 @@ RUN apt-get install -y \
curl \
netcat \
iputils-ping \
ssh
ssh \
build-essential \
libpq-dev \
libmemcached-dev \
zlib1g-dev
WORKDIR /app
COPY requirements.txt .
......
@@ -6,9 +6,12 @@ from flask import Flask
from flask_cors import CORS
from app.config import RUN_CONFIG
# from app.cache import CACHE
from app.cache import CACHE
from app.blueprints.swagger_description.swagger_description_blueprint import SWAGGER_BLUEPRINT
from app.blueprints.subset_generator_blueprint.controllers.subset_generator_controller import SUBSET_GENERATOR_BLUEPRINT
from app.blueprints.admin.controllers.authorisation_controller import ADMIN_AUTH_BLUEPRINT
from app.blueprints.admin.controllers.admin_tasks_controller import ADMIN_TASKS_BLUEPRINT
from app.db import DB
def create_app():
@@ -24,21 +27,33 @@ def create_app():
if enable_cors:
CORS(flask_app)
flask_app.config['SQLALCHEMY_DATABASE_URI'] = RUN_CONFIG.get('sql_alchemy').get('database_uri')
flask_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = RUN_CONFIG.get('sql_alchemy').get('track_modifications')
flask_app.config['SECRET_KEY'] = RUN_CONFIG.get('server_secret_key')
# with flask_app.app_context():
# CACHE.init_app(flask_app)
# # pylint: disable=protected-access
# if RUN_CONFIG.get('cache_config').get('CACHE_TYPE') == 'memcached':
# CACHE.cache._client.behaviors['tcp_nodelay'] = True
# CACHE.cache._client.behaviors['_noreply'] = True
# CACHE.cache._client.behaviors['no_block'] = True
# CACHE.cache._client.behaviors['remove_failed'] = 10
# CACHE.cache._client.behaviors['retry_timeout'] = 10
# CACHE.cache._client.behaviors['retry_timeout'] = 600
with flask_app.app_context():
DB.init_app(flask_app)
create_tables = RUN_CONFIG.get('sql_alchemy').get('create_tables', False)
if create_tables:
DB.create_all()
CACHE.init_app(flask_app)
# pylint: disable=protected-access
if RUN_CONFIG.get('cache_config').get('CACHE_TYPE') == 'memcached':
CACHE.cache._client.behaviors['tcp_nodelay'] = True
CACHE.cache._client.behaviors['_noreply'] = True
CACHE.cache._client.behaviors['no_block'] = True
CACHE.cache._client.behaviors['remove_failed'] = 10
CACHE.cache._client.behaviors['retry_timeout'] = 10
CACHE.cache._client.behaviors['retry_timeout'] = 600
flask_app.register_blueprint(SWAGGER_BLUEPRINT, url_prefix=f'{base_path}/swagger')
flask_app.register_blueprint(SUBSET_GENERATOR_BLUEPRINT, url_prefix=f'{base_path}/es_subsets')
flask_app.register_blueprint(ADMIN_AUTH_BLUEPRINT, url_prefix=f'{base_path}/admin')
flask_app.register_blueprint(ADMIN_TASKS_BLUEPRINT, url_prefix=f'{base_path}/admin')
return flask_app
......
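For orientation, this is roughly how the factory above is served: gunicorn (configured through the GUNICORN_CONFIG secret mounted by the deployment) points at a module that calls create_app(). The entry module below is a hypothetical sketch only; the real entry point is not shown in this diff.
# wsgi.py -- hypothetical entry module, for illustration only
from app import create_app
FLASK_APP = create_app()
if __name__ == '__main__':
    # Local development only; in the k8s deployment gunicorn serves the app.
    FLASK_APP.run(host='0.0.0.0', port=5000)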
"""
Module that handles decorators used in the authorisation of different endpoints.
"""
# pylint: disable=bare-except
from functools import wraps
from flask import request, jsonify
import jwt
from app.config import RUN_CONFIG
def admin_token_required(func):
"""
Checks that a valid admin token is provided in the request headers.
Makes the function return a 403 HTTP error if the token is missing, and 401 if it is invalid.
:param func: function to decorate
:return: decorated function
"""
@wraps(func)
def decorated(*args, **kwargs):
token = request.headers.get('X-Admin-Key')
key = RUN_CONFIG.get('server_secret_key')
if token is None:
return jsonify({'message': 'Token is missing'}), 403
try:
token_data = jwt.decode(token, key, algorithms=['HS256'])
username = token_data.get('username')
if username != RUN_CONFIG.get('admin_username'):
return jsonify({'message': f'You are not authorised for this operation'}), 401
except:
return jsonify({'message': 'Token is invalid'}), 401
return func(*args, **kwargs)
return decorated
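A minimal usage sketch of the decorator defined above; the blueprint and route here are hypothetical and only illustrate how admin_token_required is applied.
# Hypothetical route protected by the decorator; not part of this commit.
from flask import Blueprint, jsonify
from app.authorisation.decorators import admin_token_required
EXAMPLE_BLUEPRINT = Blueprint('example_admin', __name__)
@EXAMPLE_BLUEPRINT.route('/ping', methods=['GET'])
@admin_token_required
def admin_ping():
    # Only reachable when the X-Admin-Key header carries a valid admin JWT.
    return jsonify({'message': 'pong'})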
"""
Module that handles the generation of tokens for the app
"""
import datetime
import jwt
from app.config import RUN_CONFIG
ADMIN_TOKEN_HOURS_TO_LIVE = 1
def generate_admin_token():
"""
Generates a token that can be used to authorise admin tasks
:return: JWT token
"""
token_data = {
'username': RUN_CONFIG.get('admin_username'),
'exp': datetime.datetime.utcnow() + datetime.timedelta(hours=ADMIN_TOKEN_HOURS_TO_LIVE)
}
key = RUN_CONFIG.get('server_secret_key')
token = jwt.encode(token_data, key).decode('UTF-8')
return token
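As a round-trip sketch, the token generated above can be decoded with the same key and algorithm that the decorator uses. Note that the .decode('UTF-8') call implies PyJWT 1.x, where jwt.encode() returns bytes; under PyJWT 2.x it already returns a str.
# Round-trip sketch, assuming PyJWT 1.x and the same RUN_CONFIG used by the app.
import jwt
from app.config import RUN_CONFIG
from app.authorisation.token_generator import generate_admin_token
token = generate_admin_token()
claims = jwt.decode(token, RUN_CONFIG.get('server_secret_key'), algorithms=['HS256'])
assert claims['username'] == RUN_CONFIG.get('admin_username')  # exp is checked automatically by decode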
"""
Blueprint for the administrative tasks of the system
"""
from flask import Blueprint, jsonify
from app.blueprints.admin.services import admin_tasks_service
from app.authorisation.decorators import admin_token_required
ADMIN_TASKS_BLUEPRINT = Blueprint('admin_tasks', __name__)
@ADMIN_TASKS_BLUEPRINT.route('/delete_test_tasks_and_indexes', methods=['GET'])
@admin_token_required
def delete_test_tasks_and_indexes():
"""
Triggers the deletion of the tasks that were marked as testing
:return: the result of the operation
"""
operation_result = admin_tasks_service.delete_test_tasks_and_indexes()
return jsonify({'operation_result': operation_result})
@ADMIN_TASKS_BLUEPRINT.route('/delete_expired_tasks', methods=['GET'])
@admin_token_required
def delete_expired_tasks():
"""
Triggers the deletion of the expired tasks
:return: the result of the operation
"""
operation_result = admin_tasks_service.delete_expired_tasks()
return jsonify({'operation_result': operation_result})
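A client-side sketch of calling these maintenance endpoints. BASE_URL stands for the server URL plus the configured base_path and is a placeholder here; the admin blueprints are mounted under /admin in create_app above.
# Hypothetical client call; BASE_URL and ADMIN_TOKEN are placeholders.
import requests
BASE_URL = 'https://example.org/es_subset_generator'  # hypothetical
ADMIN_TOKEN = '<JWT obtained from the /admin/login endpoint>'
response = requests.get(f'{BASE_URL}/admin/delete_expired_tasks',
                        headers={'X-Admin-Key': ADMIN_TOKEN})
print(response.json().get('operation_result'))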
"""
Blueprint for authorisation endpoints for the administration of the system
"""
from flask import Blueprint, jsonify, request, make_response
from app.blueprints.admin.services import authorisation_service
ADMIN_AUTH_BLUEPRINT = Blueprint('admin_auth', __name__)
@ADMIN_AUTH_BLUEPRINT.route('/login', methods=['GET'])
def login():
"""
Logs in the admin user
:return: a token if the password provided is valid
"""
auth = request.authorization
if auth is None:
return make_response('No login credentials were provided!', 400,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
try:
token = authorisation_service.get_admin_token(auth.username, auth.password)
return jsonify({'token': token})
except authorisation_service.InvalidCredentialsError as error:
return make_response(str(error), 401, {'WWW-Authenticate': 'Basic realm="Login Required"'})
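And a sketch of obtaining that admin token from the login endpoint above, which expects HTTP basic auth; the credentials and BASE_URL are placeholders as before.
# Hypothetical login call; requests sends the credentials as HTTP basic auth.
import requests
BASE_URL = 'https://example.org/es_subset_generator'  # hypothetical
login = requests.get(f'{BASE_URL}/admin/login', auth=('<admin_username>', '<admin_password>'))
login.raise_for_status()
admin_token = login.json()['token']  # then sent in the X-Admin-Key header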
"""
Module that provide services for administrative tasks of the system
"""
from app.tasks.models import task_models
def delete_test_tasks_and_indexes():
"""
Deletes all the test tasks and indexes
:return: a message (string) with the result of the operation
"""
num_deleted = task_models.delete_test_tasks_and_indexes()
return f'Deleted {num_deleted} test tasks and their respective indexes'
def delete_expired_tasks():
"""
Deletes all the expired tasks and indexes
:return: a message (string) with the result of the operation
"""
num_deleted = task_models.delete_all_expired_tasks()
return f'Deleted {num_deleted} expired tasks and their respective indexes'
"""
This module checks the login info provided and returns a token if the credentials are valid
"""
from app.config import RUN_CONFIG
from app.config import verify_secret
from app.authorisation import token_generator
class InvalidCredentialsError(Exception):
"""Base class for exceptions in this module."""
def get_admin_token(username, password):
"""
:param username: admin username
:param password: admin password
:return: a token to be used in administrative functions
"""
username_must_be = RUN_CONFIG.get('admin_username')
username_is_correct = username_must_be == username
password_is_correct = verify_secret('admin_password', password)
if username_is_correct and password_is_correct:
return token_generator.generate_admin_token()
raise InvalidCredentialsError('Could not verify the credentials provided!')
@@ -10,6 +10,25 @@ class SubmitSubsetGenerationRequest(Schema):
"""
origin_index = fields.String(required=True)
items_ids = fields.String(required=True, many=True)
admin__test_run = fields.Boolean()
class SubmitSubsetGenerationRequestFromCompressedIDs(Schema):
"""
Class with the schema for submitting the generation of a subset from compressed ids
"""
origin_index = fields.String(required=True)
compressed_items_ids = fields.String(required=True, many=True)
admin__test_run = fields.Boolean()
class SubmitSubsetGenerationRequestFromFile(Schema):
"""
Class with the schema for submitting the generation of a subset from an uploaded file
"""
origin_index = fields.String(required=True)
compression = fields.String(required=True)
admin__test_run = fields.Boolean()
class SubsetGenerationStatusRequest(Schema):
@@ -17,3 +36,10 @@ class SubsetGenerationStatusRequest(Schema):
Class with the schema for requesting the status of a subset generation task
"""
task_id = fields.String(required=True)
class IDsWithNoMatchRequest(Schema):
"""
Class with the schema for requesting the ids with no match of a task
"""
task_id = fields.String(required=True)
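For illustration, a short sketch of how one of the schemas above behaves when given an incomplete submission; marshmallow's Schema.validate() returns a dict of errors. The index name is illustrative only.
# Validation sketch; uses the SubmitSubsetGenerationRequestFromFile schema defined above.
errors = SubmitSubsetGenerationRequestFromFile().validate({'origin_index': 'chembl_molecule'})
# 'compression' is required, so errors would contain something like:
# {'compression': ['Missing data for required field.']}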
@@ -20,14 +20,71 @@ def submit_subset_creation_from_ids():
form_data = request.form
origin_index = form_data.get('origin_index')
items_ids = form_data.get('items_ids')
items_ids = form_data.get('items_ids').split(',')
test_run = bool(form_data.get('admin__test_run', False))
app_logging.debug(f'origin_index: {origin_index}')
app_logging.debug(f'items_ids: {items_ids}')
try:
submission_response = subset_generator_service.submit_subset_generation_from_ids(origin_index, items_ids)
submission_response = subset_generator_service.submit_subset_generation_from_ids(origin_index, items_ids,
test_run)
return jsonify(submission_response)
except subset_generator_service.SubsetGeneratorServiceError as error:
abort(500, msg=f'Internal server error: {str(error)}')
@SUBSET_GENERATOR_BLUEPRINT.route('/submit_subset_creation_from_compressed_ids', methods=['POST'])
@validate_form_with(marshmallow_schemas.SubmitSubsetGenerationRequestFromCompressedIDs)
def submit_subset_creation_from_compressed_ids():
"""
:return: the json response with the id of the subset creation process
"""
form_data = request.form
origin_index = form_data.get('origin_index')
compressed_items_ids = form_data.get('compressed_items_ids')
test_run = bool(form_data.get('admin__test_run', False))
app_logging.debug(f'origin_index: {origin_index}')
app_logging.debug(f'compressed_items_ids: {compressed_items_ids}')
try:
submission_response = subset_generator_service.submit_subset_generation_from_compressed_ids(
origin_index,
compressed_items_ids,
test_run)
return jsonify(submission_response)
except subset_generator_service.SubsetGeneratorServiceError as error:
abort(500, msg=f'Internal server error: {str(error)}')
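Client-side, the compressed_items_ids payload expected by this endpoint is produced by reversing the decoding done in the service below: gzip-compress the comma-separated ids, then base64-encode the result. A minimal sketch, with BASE_URL, the ids and the index name as illustrative placeholders:
# Sketch of preparing and posting compressed ids; BASE_URL and the ids are illustrative.
import base64
import gzip
import requests
BASE_URL = 'https://example.org/es_subset_generator'  # hypothetical
ids = ['CHEMBL25', 'CHEMBL1380', 'CHEMBL112']
compressed_items_ids = base64.b64encode(gzip.compress(','.join(ids).encode())).decode()
response = requests.post(f'{BASE_URL}/es_subsets/submit_subset_creation_from_compressed_ids',
                         data={'origin_index': 'chembl_molecule',  # illustrative index name
                               'compressed_items_ids': compressed_items_ids})
print(response.json().get('task_id'))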
@SUBSET_GENERATOR_BLUEPRINT.route('/submit_subset_creation_from_file', methods=['POST'])
@validate_form_with(marshmallow_schemas.SubmitSubsetGenerationRequestFromFile)
def submit_subset_creation_from_ids_file():
"""
:return: the json response with the id of the subset creation process
"""
form_data = request.form
form_files = request.files
origin_index = form_data.get('origin_index')
compression = form_data.get('compression')
test_run = bool(form_data.get('admin__test_run', False))
app_logging.debug(f'origin_index: {origin_index}')
app_logging.debug(f'Need to read ids from file')
try:
submission_response = subset_generator_service.submit_subset_generation_from_ids_file(origin_index, form_files,
compression, test_run)
return jsonify(submission_response)
except subset_generator_service.SubsetGeneratorServiceError as error:
@@ -52,3 +109,22 @@ def get_task_status(task_id):
except subset_generator_service.SubsetGenerationTaskNotFoundError as error:
abort(404, repr(error))
@SUBSET_GENERATOR_BLUEPRINT.route('/get_ids_with_no_match/<task_id>', methods=['GET'])
@validate_url_params_with(marshmallow_schemas.IDsWithNoMatchRequest)
def get_ids_with_no_match(task_id):
"""
:param task_id: id of the task to check
:return: json response with the ids that didn't match any item in the origin index
"""
try:
ids_with_no_match_response = subset_generator_service.get_ids_with_no_match(task_id)
return jsonify(ids_with_no_match_response)
except subset_generator_service.SubsetGeneratorServiceError as error:
abort(500, msg=f'Internal server error: {str(error)}')
except subset_generator_service.SubsetGenerationTaskNotFoundError as error:
abort(404, repr(error))
"""
Subset generator service
"""
import gzip
import base64
import requests
from app import app_logging
from app.config import RUN_CONFIG
from app.es_subset_generator import subset_generator
from app.tasks.models import task_models
class SubsetGeneratorServiceError(Exception):
@@ -11,31 +20,141 @@ class SubsetGenerationTaskNotFoundError(Exception):
"""Base class for exceptions in the subset generation service."""
def submit_subset_generation_from_ids(origin_index, items_ids):
def submit_subset_generation_from_ids(origin_index, items_ids, test_run=False):
"""
Starts the generation of the subset based on the ids passed as parameters
:param origin_index: source index for the subset
:param items_ids: ids of the items to include
:param test_run: if True, marks the task as a test task so it can be easily cleaned up
:return: a dict with the id of the task, used to check its progress
"""
fields_list = get_all_properties_list_for_index(origin_index)
task_id = subset_generator.prepare_index_and_create_reindex_task(origin_index, items_ids, fields_list, test_run)
return {
'task_id': task_id
}
def submit_subset_generation_from_compressed_ids(origin_index, compressed_items_ids, test_run=False):
"""
Starts the generation of the subset based on the compressed ids passed as parameters
:param origin_index: source index for the subset
:param compressed_items_ids: ids compressed with gzip and encoded in base64
:param test_run: if True, marks the task as a test task so it can be easily cleaned up
:return: a dict with the id of the task, used to check its progress
"""
base64_bytes = base64.b64decode(compressed_items_ids)
decompressed_bytes = gzip.decompress(base64_bytes)
raw_ids = decompressed_bytes.decode()
items_ids = raw_ids.split(',')