Commit 98072b1b authored by David Mendez

Merge branch 'staging' into 'master'

Add missing fields for custom search by ids statistics

See merge request !72
parents acbc08f1 9b38095d
@@ -75,3 +75,7 @@ class SearchByIDsJobStatistics(Schema):
step_3_time_taken = fields.Number(required=True, validate=validate.Range(min=0))
step_4_time_taken = fields.Number(required=True, validate=validate.Range(min=0))
step_5_time_taken = fields.Number(required=True, validate=validate.Range(min=0))
num_obsolete_items = fields.Number(required=True, validate=validate.Range(min=0))
num_other_excluded_items = fields.Number(required=True, validate=validate.Range(min=0))
num_ids_included_in_results = fields.Number(required=True, validate=validate.Range(min=0))
num_inactive_items = fields.Number(required=True, validate=validate.Range(min=0))
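For illustration, a hedged sketch (not part of this merge request) of how the extended schema could be exercised with marshmallow; only the newly added fields are shown, and the payload values are made up.

```python
# Minimal sketch of validating a statistics payload against the new fields.
# The field subset and payload values below are illustrative only.
from marshmallow import Schema, fields, validate, ValidationError

class SearchByIDsJobStatistics(Schema):
    # Only the fields added in this merge request are shown here for brevity.
    num_obsolete_items = fields.Number(required=True, validate=validate.Range(min=0))
    num_other_excluded_items = fields.Number(required=True, validate=validate.Range(min=0))
    num_ids_included_in_results = fields.Number(required=True, validate=validate.Range(min=0))
    num_inactive_items = fields.Number(required=True, validate=validate.Range(min=0))

payload = {
    'num_obsolete_items': 2,
    'num_other_excluded_items': 0,
    'num_ids_included_in_results': 150,
    'num_inactive_items': 3,
}

try:
    stats = SearchByIDsJobStatistics().load(payload)
    print(stats)
except ValidationError as err:
    # Raised when a required field is missing or a value is negative.
    print(err.messages)
```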
@@ -48,6 +48,7 @@ def get_job_record_dict(job_type, run_env_type, lsf_host, started_at, finished_a
record_date_field_name: record_date
}
def save_job_record(job_type, run_env_type, lsf_host, started_at, finished_at,
seconds_taken_from_created_to_running, seconds_taken_from_running_to_finished_or_error,
final_state, num_output_files, total_output_bytes, num_input_files, total_input_bytes):
@@ -107,6 +108,7 @@ def get_job_cache_record_dict(job_type, run_env_type, was_cached, request_date):
'request_date': request_date
}
def save_job_cache_record(job_type, run_env_type, was_cached, request_date):
"""
a dict to be used to save the job cache statistics in the elasticsearch index
@@ -125,11 +127,11 @@ def save_job_cache_record(job_type, run_env_type, was_cached, request_date):
save_record_to_elasticsearch(job_cache_record_dict, index_name)
# ----------------------------------------------------------------------------------------------------------------------
# Saving records to elasticsearch
# ----------------------------------------------------------------------------------------------------------------------
def save_record_to_elasticsearch(doc, index_name):
dry_run = RUN_CONFIG.get('job_statistics', {}).get('dry_run', False)
es_host = RUN_CONFIG.get('elasticsearch', {}).get('host')
@@ -139,4 +141,4 @@ def save_record_to_elasticsearch(doc, index_name):
app_logging.debug(f'Sending the following record to the statistics: {doc} '
f'index name: {index_name} es_host: {es_host}')
result = ES.index(index=index_name, body=doc, doc_type='_doc')
app_logging.debug(f'Result {result}')
\ No newline at end of file
app_logging.debug(f'Result {result}')
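For context, a hedged usage sketch of the indexing call shape used in save_record_to_elasticsearch; the client host, index name and document below are illustrative and not taken from this repository.

```python
# Hypothetical usage sketch; host, index name and document are placeholders.
from elasticsearch import Elasticsearch

es = Elasticsearch('http://localhost:9200')

doc = {
    'job_type': 'SIMILARITY',
    'run_env_type': 'STAGING',
    'was_cached': False,
    'request_date': 1611248000000,
}

# Same call shape as in save_record_to_elasticsearch above.
result = es.index(index='some_statistics_index', body=doc, doc_type='_doc')
print(result)
```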
@@ -960,6 +960,30 @@ paths:
required: true
type: number
minimum: 0
- name: 'num_obsolete_items'
in: 'formData'
description: "Number of items that were obsolete"
required: true
type: number
minimum: 0
- name: 'num_other_excluded_items'
in: 'formData'
description: "Number of items that were excluded for other reasons, other entity"
required: true
type: number
minimum: 0
- name: 'num_ids_included_in_results'
in: 'formData'
description: "Number of items that were finally included in the results"
required: true
type: number
minimum: 0
- name: 'num_inactive_items'
in: 'formData'
description: "Number of items that were inactive"
required: true
type: number
minimum: 0
responses:
"200":
description: "successful operation"