diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 26fed7085..18b9f39b9 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 6.8.0 +current_version = 6.9.0 commit = True tag = False parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>[A-z0-9-]+) diff --git a/api/graphql/schema.py b/api/graphql/schema.py index c87d19b88..eec4b0ef9 100644 --- a/api/graphql/schema.py +++ b/api/graphql/schema.py @@ -843,6 +843,24 @@ async def my_projects(self, info: Info) -> list[GraphQLProject]: ) return [GraphQLProject.from_internal(p) for p in projects] + @strawberry.field + async def analysis_runner( + self, + info: Info, + ar_guid: str, + ) -> GraphQLAnalysisRunner: + if not ar_guid: + raise ValueError('Must provide ar_guid') + connection = info.context['connection'] + alayer = AnalysisRunnerLayer(connection) + filter_ = AnalysisRunnerFilter(ar_guid=GenericFilter(eq=ar_guid)) + analysis_runners = await alayer.query(filter_) + if len(analysis_runners) != 1: + raise ValueError( + f'Expected exactly one analysis runner, found {len(analysis_runners)}' + ) + return GraphQLAnalysisRunner.from_internal(analysis_runners[0]) + schema = strawberry.Schema( query=Query, mutation=None, extensions=[QueryDepthLimiter(max_depth=10)] diff --git a/api/routes/analysis_runner.py b/api/routes/analysis_runner.py index ff2fede24..2ee383a55 100644 --- a/api/routes/analysis_runner.py +++ b/api/routes/analysis_runner.py @@ -25,13 +25,13 @@ async def create_analysis_runner_log( # pylint: disable=too-many-arguments description: str, driver_image: str, config_path: str, - cwd: str | None, environment: str, - hail_version: str | None, batch_url: str, submitting_user: str, meta: dict[str, str], output_path: str, + hail_version: str | None = None, + cwd: str | None = None, connection: Connection = get_project_write_connection, ) -> str: """Create a new analysis runner log""" diff --git a/api/routes/billing.py b/api/routes/billing.py index 3a830197d..3b5a881ec 100644 --- a/api/routes/billing.py +++ b/api/routes/billing.py @@ -12,7 +12,7 @@ from db.python.layers.billing import BillingLayer from models.enums import BillingSource from models.models import ( - BillingBatchCostRecord, + AnalysisCostRecord, BillingColumn, BillingCostBudgetRecord, BillingTotalCostQueryModel, @@ -276,7 +276,7 @@ async def get_namespaces( @router.get( '/cost-by-ar-guid/{ar_guid}', - response_model=list[BillingBatchCostRecord], + response_model=list[AnalysisCostRecord], operation_id='costByArGuid', ) @alru_cache(maxsize=10, ttl=BILLING_CACHE_RESPONSE_TTL) async def get_cost_by_ar_guid( @@ -294,7 +294,7 @@ async def get_cost_by_ar_guid( @router.get( '/cost-by-batch-id/{batch_id}', - response_model=list[BillingBatchCostRecord], + response_model=list[AnalysisCostRecord], operation_id='costByBatchId', ) @alru_cache(maxsize=10, ttl=BILLING_CACHE_RESPONSE_TTL) diff --git a/api/server.py b/api/server.py index b5d58209b..cdc03e669 100644 --- a/api/server.py +++ b/api/server.py @@ -19,7 +19,7 @@ from db.python.utils import get_logger # This tag is automatically updated by bump2version -_VERSION = '6.8.0' +_VERSION = '6.9.0' logger = get_logger() diff --git a/api/utils/db.py b/api/utils/db.py index 5d88a2b1b..c574c1278 100644 --- a/api/utils/db.py +++ b/api/utils/db.py @@ -1,3 +1,4 @@ +import json import logging from os import getenv @@ -29,6 +30,19 @@ def get_ar_guid(request: Request) -> str | None: return request.headers.get('sm-ar-guid') +def get_extra_audit_log_values(request: Request) -> dict | None: + """Get a JSON encoded dictionary from the 'sm-extra-values' 
header if it exists""" + headers = request.headers.get('sm-extra-values') + if not headers: + return None + + try: + return json.loads(headers) + except json.JSONDecodeError: + logging.error(f'Could not parse sm-extra-values: {headers}') + return None + + def get_on_behalf_of(request: Request) -> str | None: """ Get sm-on-behalf-of if there are requests that were performed on behalf of @@ -69,12 +83,16 @@ async def dependable_get_write_project_connection( request: Request, author: str = Depends(authenticate), ar_guid: str = Depends(get_ar_guid), + extra_values: dict | None = Depends(get_extra_audit_log_values), on_behalf_of: str | None = Depends(get_on_behalf_of), ) -> Connection: """FastAPI handler for getting connection WITH project""" meta = {'path': request.url.path} if request.client: meta['ip'] = request.client.host + if extra_values: + meta.update(extra_values) + return await ProjectPermissionsTable.get_project_connection( project_name=project, author=author, @@ -89,14 +107,20 @@ async def dependable_get_readonly_project_connection( project: str, author: str = Depends(authenticate), ar_guid: str = Depends(get_ar_guid), + extra_values: dict | None = Depends(get_extra_audit_log_values), ) -> Connection: """FastAPI handler for getting connection WITH project""" + meta = {} + if extra_values: + meta.update(extra_values) + return await ProjectPermissionsTable.get_project_connection( project_name=project, author=author, readonly=True, on_behalf_of=None, ar_guid=ar_guid, + meta=meta, ) @@ -104,12 +128,16 @@ async def dependable_get_connection( request: Request, author: str = Depends(authenticate), ar_guid: str = Depends(get_ar_guid), + extra_values: dict | None = Depends(get_extra_audit_log_values), ): """FastAPI handler for getting connection withOUT project""" meta = {'path': request.url.path} if request.client: meta['ip'] = request.client.host + if extra_values: + meta.update(extra_values) + return await SMConnections.get_connection_no_project( author, ar_guid=ar_guid, meta=meta ) diff --git a/db/project.xml b/db/project.xml index 788467cc7..deafffaff 100644 --- a/db/project.xml +++ b/db/project.xml @@ -1178,14 +1178,24 @@ ALTER TABLE analysis_runner ADD SYSTEM VERSIONING; + + + + + SET @@system_versioning_alter_history = 1; + ALTER TABLE analysis_runner MODIFY COLUMN config_path VARCHAR(255) NULL; + ALTER TABLE analysis_runner MODIFY COLUMN audit_log_id INT NULL; + + + INSERT INTO analysis_runner ( ar_guid, project, timestamp, access_level, repository, `commit`, output_path, script, description, driver_image, config_path, cwd, environment, hail_version, batch_url, submitting_user, meta, audit_log_id ) SELECT COALESCE(JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.ar-guid')), UUID()) as ar_guid, analysis.project as project, - STR_TO_DATE(REPLACE(JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.timestamp')), 'T', ' '), - '%Y-%m-%d %H:%i:%s.%f') as timestamp, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, + CONVERT(JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.timestamp')), DATETIME) as timestamp, + JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.accessLevel')) as access_level, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.repo')) as repository, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.commit')) as `commit`, analysis.output as output_path, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.script')) as @@ -1193,7 +1203,7 @@ JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.driverImage')) as driver_image, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.configPath')) as config_path, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, 
'$.cwd')) as cwd, - JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.environment')) as environment, + COALESCE(JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.environment')), 'gcp') as environment, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.hailVersion')) as hail_version, JSON_UNQUOTE(JSON_EXTRACT(analysis.meta, '$.batch_url')) as batch_url, COALESCE(audit_log.on_behalf_of, analysis.author) as submitting_user, JSON_REMOVE( diff --git a/db/python/layers/billing.py b/db/python/layers/billing.py index c92692c2c..c3512202a 100644 --- a/db/python/layers/billing.py +++ b/db/python/layers/billing.py @@ -6,7 +6,7 @@ from db.python.tables.bq.billing_raw import BillingRawTable from models.enums import BillingSource from models.models import ( - BillingBatchCostRecord, + AnalysisCostRecord, BillingColumn, BillingCostBudgetRecord, BillingTotalCostQueryModel, @@ -204,7 +204,7 @@ async def get_running_cost( async def get_cost_by_ar_guid( self, ar_guid: str | None = None, - ) -> list[BillingBatchCostRecord]: + ) -> list[AnalysisCostRecord]: """ Get Costs by AR GUID """ @@ -229,7 +229,7 @@ async def get_cost_by_ar_guid( async def get_cost_by_batch_id( self, batch_id: str | None = None, - ) -> list[BillingBatchCostRecord]: + ) -> list[AnalysisCostRecord]: """ Get Costs by Batch ID """ diff --git a/db/python/tables/bq/billing_daily_extended.py b/db/python/tables/bq/billing_daily_extended.py index 5e2d2bf3c..931f28a94 100644 --- a/db/python/tables/bq/billing_daily_extended.py +++ b/db/python/tables/bq/billing_daily_extended.py @@ -7,7 +7,7 @@ BillingBaseTable, time_optimisation_parameter, ) -from models.models import BillingBatchCostRecord, BillingColumn +from models.models import AnalysisCostRecord, BillingColumn class BillingDailyExtendedTable(BillingBaseTable): @@ -60,7 +60,7 @@ async def get_batch_cost_summary( end_time: datetime, batch_ids: list[str] | None, ar_guid: str | None, - ) -> list[BillingBatchCostRecord]: + ) -> list[AnalysisCostRecord]: """ Get summary of AR run """ @@ -361,9 +361,7 @@ async def get_batch_cost_summary( query_job_result = self._execute_query(_query, query_parameters, False) if query_job_result: - return [ - BillingBatchCostRecord.from_json(dict(row)) for row in query_job_result - ] + return [AnalysisCostRecord.from_dict(dict(row)) for row in query_job_result] # return empty list if no record found return [] diff --git a/deploy/python/version.txt b/deploy/python/version.txt index e029aa99b..97f578152 100644 --- a/deploy/python/version.txt +++ b/deploy/python/version.txt @@ -1 +1 @@ -6.8.0 +6.9.0 diff --git a/metamist/graphql/__init__.py b/metamist/graphql/__init__.py index 5696293f7..4a88b5383 100644 --- a/metamist/graphql/__init__.py +++ b/metamist/graphql/__init__.py @@ -4,18 +4,23 @@ - construct queries using the `gql` function (which validates graphql syntax) - validate queries with metamist schema (by fetching the schema) """ + import os +from json.decoder import JSONDecodeError from typing import Any, Dict +import backoff from gql import Client from gql import gql as gql_constructor from gql.transport.aiohttp import AIOHTTPTransport from gql.transport.aiohttp import log as aiohttp_logger +from gql.transport.exceptions import TransportServerError from gql.transport.requests import RequestsHTTPTransport from gql.transport.requests import log as requests_logger # this does not import itself, it imports the module from graphql import DocumentNode # type: ignore +from requests.exceptions import HTTPError from cpg_utils.cloud import get_google_identity_token @@ -137,8 +142,17 @@ def 
validate(doc: DocumentNode, client=None, use_local_schema=False): # use older style typing to broaden supported Python versions +@backoff.on_exception( + backoff.expo, + exception=(HTTPError, JSONDecodeError, TransportServerError), + max_time=10, + max_tries=3, +) def query( - _query: str | DocumentNode, variables: Dict = None, client: Client = None, log_response: bool = False + _query: str | DocumentNode, + variables: Dict | None = None, + client: Client | None = None, + log_response: bool = False, ) -> Dict[str, Any]: """Query the metamist GraphQL API""" if variables is None: @@ -159,8 +173,17 @@ def query( return response +@backoff.on_exception( + backoff.expo, + exception=(HTTPError, JSONDecodeError, TransportServerError), + max_time=10, + max_tries=3, +) async def query_async( - _query: str | DocumentNode, variables: Dict = None, client: Client = None, log_response: bool = False + _query: str | DocumentNode, + variables: Dict | None = None, + client: Client | None = None, + log_response: bool = False, ) -> Dict[str, Any]: """Asynchronously query the Metamist GraphQL API""" if variables is None: diff --git a/models/base.py b/models/base.py index 2b728e2af..766be2388 100644 --- a/models/base.py +++ b/models/base.py @@ -8,6 +8,11 @@ class SMBase(BaseModel): """Base object for all models""" + @classmethod + def from_dict(cls, d: dict): + """Create an object from a dictionary""" + return cls(**d) + def parse_sql_bool(val: str | int | bytes) -> bool | None: """Parse a string from a sql bool""" diff --git a/models/models/__init__.py b/models/models/__init__.py index 8b100662f..cec58cda2 100644 --- a/models/models/__init__.py +++ b/models/models/__init__.py @@ -12,7 +12,7 @@ from models.models.assay import Assay, AssayInternal, AssayUpsert, AssayUpsertInternal from models.models.audit_log import AuditLogId, AuditLogInternal from models.models.billing import ( - BillingBatchCostRecord, + AnalysisCostRecord, BillingColumn, BillingCostBudgetRecord, BillingCostDetailsRecord, diff --git a/models/models/billing.py b/models/models/billing.py index 83cb7586f..5f324a533 100644 --- a/models/models/billing.py +++ b/models/models/billing.py @@ -350,33 +350,191 @@ def from_json(record): ) -class BillingBatchCostRecord(SMBase): +class AnalysisCostRecordTotal(SMBase): + """A model of attributes about the total for an analysis""" + + ar_guid: str | None + cost: float | None + usage_start_time: datetime.datetime | None + usage_end_time: datetime.datetime | None + + +class AnalysisCostRecordSku(SMBase): + """Cost for an SKU""" + + sku: str + cost: float + + +class AnalysisCostRecordSeqGroup(SMBase): + """ + Cost of a sequencing_group for some stage + sequencing_group = None: cost of all jobs for a stage without a sg + stage = None: cost of all jobs for a sg without a stage + """ + + sequencing_group: str | None + stage: str | None + cost: float + + +class AnalysisCostRecordBatchJob(SMBase): + """Hail batch container cost record""" + + job_id: str + job_name: str | None + skus: list[AnalysisCostRecordSku] + cost: float + usage_start_time: datetime.datetime + usage_end_time: datetime.datetime + + @classmethod + def from_dict(cls, d: dict): + return AnalysisCostRecordBatchJob( + job_id=d['job_id'], + job_name=d.get('job_name'), + skus=[AnalysisCostRecordSku.from_dict(row) for row in d.get('skus') or []], + cost=d['cost'], + usage_start_time=d['usage_start_time'], + usage_end_time=d['usage_end_time'], + ) + + +class AnalysisCostRecordBatch(SMBase): + """ + A list of these is on the 'BillingBatchCostRecord' + """ + 
+ batch_id: str + batch_name: str | None + cost: float + usage_start_time: datetime.datetime + usage_end_time: datetime.datetime + jobs_cnt: int + skus: list[AnalysisCostRecordSku] + jobs: list[AnalysisCostRecordBatchJob] + seq_groups: list[AnalysisCostRecordSeqGroup] + + @classmethod + def from_dict(cls, d: dict): + return AnalysisCostRecordBatch( + batch_id=d['batch_id'], + batch_name=d.get('batch_name'), + cost=d['cost'], + usage_start_time=d['usage_start_time'], + usage_end_time=d['usage_end_time'], + jobs_cnt=d['jobs_cnt'], + skus=[AnalysisCostRecordSku.from_dict(row) for row in d.get('skus') or []], + jobs=[ + AnalysisCostRecordBatchJob.from_dict(row) for row in d.get('jobs') or [] + ], + seq_groups=[ + AnalysisCostRecordSeqGroup.from_dict(row) + for row in d.get('seq_groups') or [] + ], + ) + + +class AnalysisCostRecordCategory(SMBase): + """Category, cost, workflows""" + + category: str + cost: float + workflows: int | None + + @classmethod + def from_dict(cls, d: dict): + return AnalysisCostRecordCategory(**dict(d)) + + +class AnalysisCostRecordTopic(SMBase): + """Topic, cost""" + + topic: str + cost: float + + @classmethod + def from_dict(cls, d: dict): + return AnalysisCostRecordTopic(**dict(d)) + + +class AnalysisCostRecordWdlTask(SMBase): + """Cost of a WDL task""" + + wdl_task_name: str + cost: float + usage_start_time: datetime.datetime + usage_end_time: datetime.datetime + skus: list[AnalysisCostRecordSku] + + +class AnalysisCostRecordCromwellSubworkflow(SMBase): + """Cost of a Cromwell subworkflow""" + + cromwell_sub_workflow_name: str + cost: float + usage_start_time: datetime.datetime + usage_end_time: datetime.datetime + skus: list[AnalysisCostRecordSku] + + +class AnalysisCostRecordCromwellWorkflow(SMBase): + """Cost of a Cromwell workflow""" + + cromwell_workflow_id: str + cost: float + usage_start_time: datetime.datetime + usage_end_time: datetime.datetime + skus: list[AnalysisCostRecordSku] + + +class AnalysisCostRecord(SMBase): """Return class for the Billing Cost by batch_id/ar_guid""" - total: dict | None - topics: list[dict] | None - categories: list[dict] | None - batches: list[dict] | None - skus: list[dict] | None - seq_groups: list[dict] | None + total: AnalysisCostRecordTotal | None + topics: list[AnalysisCostRecordTopic] | None + categories: list[AnalysisCostRecordCategory] | None + batches: list[AnalysisCostRecordBatch] | None + skus: list[AnalysisCostRecordSku] | None + seq_groups: list[AnalysisCostRecordSeqGroup] | None - wdl_tasks: list[dict] | None - cromwell_sub_workflows: list[dict] | None - cromwell_workflows: list[dict] | None + wdl_tasks: list[AnalysisCostRecordWdlTask] | None + cromwell_sub_workflows: list[AnalysisCostRecordCromwellSubworkflow] | None + cromwell_workflows: list[AnalysisCostRecordCromwellWorkflow] | None dataproc: list[dict] | None - @staticmethod - def from_json(record): + @classmethod + def from_dict(cls, d: dict): """Create BillingBatchCostRecord from json""" - return BillingBatchCostRecord( - total=record.get('total'), - topics=record.get('topics'), - categories=record.get('categories'), - batches=record.get('batches'), - skus=record.get('skus'), - seq_groups=record.get('seq_groups'), - wdl_tasks=record.get('wdl_tasks'), - cromwell_sub_workflows=record.get('cromwell_sub_workflows'), - cromwell_workflows=record.get('cromwell_workflows'), - dataproc=record.get('dataproc'), + + return AnalysisCostRecord( + total=AnalysisCostRecordTotal.from_dict(d['total']), + topics=[ + AnalysisCostRecordTopic.from_dict(row) for row in 
d.get('topics') or [] + ], + categories=[ + AnalysisCostRecordCategory.from_dict(row) + for row in d.get('categories') or [] + ], + batches=[ + AnalysisCostRecordBatch.from_dict(row) for row in d.get('batches') or [] + ], + skus=[AnalysisCostRecordSku.from_dict(row) for row in d.get('skus') or []], + seq_groups=[ + AnalysisCostRecordSeqGroup.from_dict(row) + for row in d.get('seq_groups') or [] + ], + wdl_tasks=[ + AnalysisCostRecordWdlTask.from_dict(row) + for row in d.get('wdl_tasks') or [] + ], + cromwell_sub_workflows=[ + AnalysisCostRecordCromwellSubworkflow.from_dict(row) + for row in d.get('cromwell_sub_workflows') or [] + ], + cromwell_workflows=[ + AnalysisCostRecordCromwellWorkflow.from_dict(row) + for row in d.get('cromwell_workflows') or [] + ], + dataproc=d.get('dataproc'), ) diff --git a/openapi-templates/api_client.mustache b/openapi-templates/api_client.mustache index d78bf1909..cf7296202 100644 --- a/openapi-templates/api_client.mustache +++ b/openapi-templates/api_client.mustache @@ -35,6 +35,21 @@ from {{packageName}}.model_utils import ( validate_and_convert_types ) +def get_select_env_values(): + env_values = { + 'HAIL_ATTEMPT_ID': 'HAIL_ATTEMPT_ID', + 'HAIL_BATCH_ID': 'HAIL_BATCH_ID', + 'HAIL_JOB_ID': 'HAIL_JOB_ID', + } + + as_map = {} + for env_key, dict_key in env_values.items(): + value = os.getenv(env_key) + if value: + as_map[dict_key] = value + + return as_map + class ApiClient(object): """Generic API client for OpenAPI client library builds. @@ -74,6 +89,10 @@ class ApiClient(object): self.default_headers['sm-ar-guid'] = ar_guid if header_name is not None: self.default_headers[header_name] = header_value + extra_values = get_select_env_values() + if extra_values: + self.default_headers['sm-extra-values'] = json.dumps(extra_values) + self.cookie = cookie # Set default User-Agent. 
self.user_agent = '{{{httpUserAgent}}}{{^httpUserAgent}}OpenAPI-Generator/{{{packageVersion}}}/python{{/httpUserAgent}}' diff --git a/setup.py b/setup.py index 7e6ad5d47..bcd885cad 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ setup( name=PKG, # This tag is automatically updated by bump2version - version='6.8.0', + version='6.9.0', description='Python API for interacting with the Sample API system', long_description=readme, long_description_content_type='text/markdown', @@ -27,6 +27,7 @@ license='MIT', packages=all_packages, install_requires=[ + 'backoff>=2.2.1', 'click', 'google-auth', 'google-api-core', # dependency to google-auth that however is not diff --git a/test/test_api_billing.py b/test/test_api_billing.py index 2f5c00665..5f8e9ad2c 100644 --- a/test/test_api_billing.py +++ b/test/test_api_billing.py @@ -1,4 +1,5 @@ # pylint: disable=protected-access too-many-public-methods +import datetime import json from test.testbase import run_as_sync from test.testbqbase import BqTest @@ -6,7 +7,7 @@ from api.routes import billing from models.models import ( - BillingBatchCostRecord, + AnalysisCostRecord, BillingColumn, BillingCostBudgetRecord, BillingTotalCostQueryModel, @@ -56,27 +57,33 @@ async def test_get_cost_by_ar_guid( """ ar_guid = 'test_ar_guid' mockup_record_json = { - 'total': {'ar_guid': ar_guid}, - 'topics': None, - 'categories': None, + 'total': { + 'ar_guid': ar_guid, + 'cost': 0.0, + 'usage_end_time': None, + 'usage_start_time': None, + }, + 'topics': [], + 'categories': [], 'batches': [], - 'skus': None, - 'seq_groups': None, - 'wdl_tasks': None, - 'cromwell_sub_workflows': None, - 'cromwell_workflows': None, - 'dataproc': None, + 'skus': [], + 'seq_groups': [], + 'wdl_tasks': [], + 'cromwell_sub_workflows': [], + 'cromwell_workflows': [], + 'dataproc': [], } - mockup_record = [BillingBatchCostRecord.from_json(mockup_record_json)] + mockup_record = [AnalysisCostRecord.from_dict(mockup_record_json)] mock_get_billing_layer.return_value = self.layer mock_get_cost_by_ar_guid.return_value = mockup_record response = await billing.get_cost_by_ar_guid( ar_guid, author=TEST_API_BILLING_USER ) - self.assertEqual( - [mockup_record_json], json.loads(response.body.decode('utf-8')) - ) + resp_json = json.loads(response.body.decode('utf-8')) + self.assertEqual(1, len(resp_json)) + + self.assertDictEqual(mockup_record_json, resp_json[0]) @run_as_sync @patch('api.routes.billing._get_billing_layer_from') @@ -90,27 +97,45 @@ async def test_get_cost_by_batch_id( ar_guid = 'test_ar_guid' batch_id = 'test_batch_id' mockup_record_json = { - 'total': {'ar_guid': ar_guid}, - 'topics': None, - 'categories': None, - 'batches': [{'batch_id': batch_id}], - 'skus': None, - 'seq_groups': None, - 'wdl_tasks': None, - 'cromwell_sub_workflows': None, - 'cromwell_workflows': None, - 'dataproc': None, + 'total': { + 'ar_guid': ar_guid, + 'cost': 0.0, + 'usage_end_time': None, + 'usage_start_time': None, + }, + 'topics': [], + 'categories': [], + 'batches': [ + { + 'batch_id': batch_id, + 'batch_name': None, + 'cost': 0.0, + 'usage_start_time': datetime.datetime.now().isoformat(), + 'usage_end_time': datetime.datetime.now().isoformat(), + 'jobs_cnt': 0, + 'skus': [], + 'jobs': [], + 'seq_groups': [], + } + ], + 'skus': [], + 'seq_groups': [], + 'wdl_tasks': [], + 'cromwell_sub_workflows': [], + 'cromwell_workflows': [], + 'dataproc': [], } - mockup_record = [BillingBatchCostRecord.from_json(mockup_record_json)] + mockup_record = [AnalysisCostRecord.from_dict(mockup_record_json)] 
mock_get_billing_layer.return_value = self.layer mock_get_cost_by_batch_id.return_value = mockup_record response = await billing.get_cost_by_batch_id( batch_id, author=TEST_API_BILLING_USER ) - self.assertEqual( - [mockup_record_json], json.loads(response.body.decode('utf-8')) - ) + resp_json = json.loads(response.body.decode('utf-8')) + + self.assertEqual(1, len(resp_json)) + self.assertDictEqual(mockup_record_json, resp_json[0]) @run_as_sync @patch('api.routes.billing._get_billing_layer_from') diff --git a/web/package-lock.json b/web/package-lock.json index f04895d4d..8fee2c4a1 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1,12 +1,12 @@ { "name": "metamist", - "version": "6.6.2", + "version": "6.8.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "metamist", - "version": "6.6.2", + "version": "6.8.0", "dependencies": { "@apollo/client": "^3.7.3", "@artsy/fresnel": "^6.2.1", diff --git a/web/package.json b/web/package.json index 1c1527f30..6c247c0eb 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "metamist", - "version": "6.8.0", + "version": "6.9.0", "private": true, "dependencies": { "@apollo/client": "^3.7.3", diff --git a/web/src/pages/billing/BillingCostByAnalysis.tsx b/web/src/pages/billing/BillingCostByAnalysis.tsx index 181b8c1a6..a87515a80 100644 --- a/web/src/pages/billing/BillingCostByAnalysis.tsx +++ b/web/src/pages/billing/BillingCostByAnalysis.tsx @@ -4,7 +4,7 @@ import { Button, Card, Grid, Input, Message, Select, Dropdown } from 'semantic-u import SearchIcon from '@mui/icons-material/Search' import LoadingDucks from '../../shared/components/LoadingDucks/LoadingDucks' -import { BillingApi, BillingTotalCostRecord, AnalysisApi } from '../../sm-api' +import { BillingApi, AnalysisCostRecord } from '../../sm-api' import BatchGrid from './components/BatchGrid' import { getMonthStartDate } from '../../shared/utilities/monthStartEndDate' @@ -22,37 +22,22 @@ const BillingCostByAnalysis: React.FunctionComponent = () => { // Data loading const [isLoading, setIsLoading] = React.useState(true) const [error, setError] = React.useState() - + const [data, setData] = React.useState() const [start, setStart] = React.useState( searchParams.get('start') ?? getMonthStartDate() ) - const [data, setData] = React.useState(undefined) - - const setArData = (arData: []) => { + const setBillingRecord = (records: AnalysisCostRecord[]) => { setIsLoading(false) // arData is an array of objects, we use only the first obejct // in the future we maye have search by several ar_guids / author etc. - if (arData === undefined || arData.length === 0) { + setData(records) + if (!records || records.length === 0) { // nothing found setIsLoading(false) setData(null) return } - const ar_record = arData[0] - if (!!ar_record?.total?.ar_guid) { - new AnalysisApi() - .getAnalysisRunnerLog(undefined, undefined, ar_record.total.ar_guid, undefined) - .then((response) => { - // combine arData and getAnalysisRunnerLog - if (response.data.length > 0) { - // use only the first record for now - ar_record.analysisRunnerLog = response.data[0] - } - setData(ar_record) - }) - .catch((er) => setError(er.message)) - } setIsLoading(false) } @@ -65,7 +50,7 @@ const BillingCostByAnalysis: React.FunctionComponent = () => { })) const [searchByType, setSearchByType] = React.useState( - SearchType[searchParams.get('searchType')] ?? SearchType[0] + (searchParams.get('searchType') as SearchType) ?? 
SearchType[0] ) // use navigate and update url params @@ -96,14 +81,14 @@ const BillingCostByAnalysis: React.FunctionComponent = () => { new BillingApi() .costByArGuid(sTxt) .then((response) => { - setArData(response.data) + setBillingRecord(response.data) }) .catch((er) => setError(er.message)) } else if (convertedType === SearchType.Batch_id) { new BillingApi() .costByBatchId(sTxt) .then((response) => { - setArData(response.data) + setBillingRecord(response.data) }) .catch((er) => setError(er.message)) } else { @@ -231,21 +216,16 @@ const BillingCostByAnalysis: React.FunctionComponent = () => { ) - const batchGrid = (gridData: BillingTotalCostRecord) => + // const batchGrid = (gridData: BillingTotalCostRecord) => const dataComponent = () => { - if (data !== undefined && data !== null) { + if ((data?.length || 0) > 0) { // only render grid if there are available cost data - return batchGrid(data) + return data!.map((row, idx) => ) } // if valid search text and no data return return No data message - if ( - data === null && - searchByType !== undefined && - searchTxt !== undefined && - searchTxt.length > 5 - ) { + if (!!data && !!searchByType && searchTxt?.length > 5) { return (

No data found. diff --git a/web/src/pages/billing/components/BatchGrid.tsx b/web/src/pages/billing/components/BatchGrid.tsx index def276078..d02dcb49c 100644 --- a/web/src/pages/billing/components/BatchGrid.tsx +++ b/web/src/pages/billing/components/BatchGrid.tsx @@ -1,571 +1,451 @@ -import * as React from 'react' -import { Table as SUITable, Card, Checkbox } from 'semantic-ui-react' import _ from 'lodash' -import { DonutChart } from '../../../shared/components/Graphs/DonutChart' +import * as React from 'react' +import { Card } from 'semantic-ui-react' import '../../project/AnalysisRunnerView/AnalysisGrid.css' -import { TableVirtuoso } from 'react-virtuoso' - -import Table from '@mui/material/Table' -import TableBody from '@mui/material/TableBody' -import TableContainer from '@mui/material/TableContainer' -import TableHead from '@mui/material/TableHead' -import TableRow from '@mui/material/TableRow' -import Paper from '@mui/material/Paper' -import formatMoney from '../../../shared/utilities/formatMoney' -const hailBatchUrl = 'https://batch.hail.populationgenomics.org.au/batches' +import { Table as SUITable } from 'semantic-ui-react' -const BatchGrid: React.FunctionComponent<{ - data: any -}> = ({ data }) => { - const [openRows, setOpenRows] = React.useState([]) +import Table, { CheckboxRow, DisplayRow } from '../../../shared/components/Table' - const handleToggle = (position: string) => { - if (!openRows.includes(position)) { - setOpenRows([...openRows, position]) - } else { - setOpenRows(openRows.filter((value) => value !== position)) - } - } +import { useQuery } from '@apollo/client' +import { gql } from '../../../__generated__' +import MuckTheDuck from '../../../shared/components/MuckTheDuck' +import formatMoney from '../../../shared/utilities/formatMoney' +import { AnalysisCostRecord, AnalysisCostRecordBatch } from '../../../sm-api' +import { BatchJobsTable } from './BatchJobGrid' +import { CostBySkuRow, SeqGrpDisplay } from './BillingByAnalysisComponents' + +interface IGenericCardData { + cost: number + jobs_cnt?: number + usage_start_time: string + usage_end_time: string + skus: { + sku: string + cost: number + }[] +} - const prepareBatchUrl = (batch_id: string) => ( - - BATCH ID: {batch_id} - - ) +const hailBatchUrl = 'https://batch.hail.populationgenomics.org.au/batches' - const prepareBgColor = (log: any) => { - if (log.batch_id === undefined) { - return 'var(--color-border-color)' - } - if (log.job_id === undefined) { - return 'var(--color-border-default)' +const BatchUrlLink: React.FC<{ batch_id: string }> = ({ batch_id }) => ( + + BATCH ID: {batch_id} + +) + +const GET_AR_RECORDS = gql(` + query BillingByAnalysisRunnerLog($arGuid: String!) 
{ + analysisRunner(arGuid: $arGuid) { + arGuid + timestamp + accessLevel + repository + commit + script + description + driverImage + configPath + cwd + environment + hailVersion + batchUrl + submittingUser + meta + outputPath } - return 'var(--color-bg)' - } - - const calcDuration = (dataItem) => { - const duration = new Date(dataItem.usage_end_time) - new Date(dataItem.usage_start_time) - const seconds = Math.floor((duration / 1000) % 60) - const minutes = Math.floor((duration / (1000 * 60)) % 60) - const hours = Math.floor((duration / (1000 * 60 * 60)) % 24) - const formattedDuration = `${hours}h ${minutes}m ${seconds}s` - return {formattedDuration} } +`) - const idx = 0 - - const displayCheckBoxRow = ( - parentToggle: string, - key: string, - toggle: string, - text: string - ) => ( - - - - handleToggle(toggle)} - /> - - {text} - - ) - - const displayTopLevelCheckBoxRow = (key: string, text: string) => ( - - - handleToggle(key)} - /> - - {text} - - ) - - const displayRow = (toggle: string, key: string, label: string, text: string) => ( - - - - {label} - - {text} - - ) - - const displayCostBySkuRow = ( - parentToggles: list, - toggle: string, - chartId: string, - chartMaxWidth: number, - colSpan: number, - data: any - ) => ( - <> - openRows.includes(p)) && - openRows.includes(toggle) - ? 'table-row' - : 'none', - backgroundColor: 'var(--color-bg)', - }} - key={toggle} - > - - - - {chartId && ( - ({ - label: srec.sku, - value: srec.cost, - }))} - maxSlices={data.skus.length} - showLegend={false} - isLoading={false} - maxWidth={chartMaxWidth} - /> - )} - - - - SKU - COST - - - - {data.skus.map((srec, sidx) => ( - - {srec.sku} - {formatMoney(srec.cost, 4)} - - ))} - - - - - - ) - - const displayCostBySeqGrpRow = ( - parentToggle: string, - key: string, - toggle: string, - textCheckbox: string, - data: any - ) => ( - <> - {displayCheckBoxRow(parentToggle, key, toggle, textCheckbox)} - - - - - - - - SEQ GROUP - STAGE - COST - - - - {data.seq_groups - .sort((a, b) => b.cost - a.cost) // Sort by cost in descending order - .map((gcat, gidx) => ( - - {gcat.sequencing_group} - {gcat.stage} - {formatMoney(gcat.cost, 4)} - - ))} - - - - - - ) - - const displayCommonSection = (key: string, header: string, data: any) => ( - <> - {displayTopLevelCheckBoxRow(`row-${key}`, `${header}`)} - - {displayRow( - '', - `${key}-detail-cost`, - 'Cost', - `${formatMoney(data.cost, 4)} ${ - data.jobs_cnt > 0 ? 
` (across ${data.jobs_cnt} jobs)` : '' - }` - )} - - {displayRow(`row-${key}`, `${key}-detail-start`, 'Start', data.usage_start_time)} - {displayRow(`row-${key}`, `${key}-detail-end`, 'End', data.usage_end_time)} - - {displayCheckBoxRow(`row-${key}`, `sku-toggle-${key}`, `sku-${key}`, 'Cost By SKU')} - {displayCostBySkuRow([`row-${key}`], `sku-${key}`, `donut-chart-${key}`, 600, 1, data)} - - ) - - const ExpandableRow = ({ item, ...props }) => { - const index = props['data-index'] - return ( - - - - handleToggle(`${item.batch_id}-${item.job_id}`)} - /> - - {item.job_id} - {item.job_name} - {item.usage_start_time} - {calcDuration(item)} - {formatMoney(item.cost, 4)} - - - {/* cost by SKU */} - {displayCostBySkuRow( - [`row-${item.batch_id}`, `jobs-${item.batch_id}`], - `${item.batch_id}-${item.job_id}`, - undefined, - undefined, - 4, - item - )} - - ) - } +const AnalysisRunnerRecordCard: React.FC<{ data: AnalysisCostRecord }> = ({ data, ...props }) => { + const [isOpen, setIsOpen] = React.useState(false) + const [isTopicsOpen, setIsTopicsOpen] = React.useState(false) + const [isSeqGroupOpen, setIsSeqGroupOpen] = React.useState(false) + const [isSkuOpen, setIsSkuOpen] = React.useState(false) - const TableComponents = { - Scroller: React.forwardRef((props, ref) => ( - - )), - Table: (props) => , - TableHead: TableHead, - TableRow: ExpandableRow, - TableBody: React.forwardRef((props, ref) => ), - } + const arGuid = data?.total?.ar_guid + const queryResponse = useQuery(GET_AR_RECORDS, { + skip: !arGuid, + variables: { arGuid: arGuid! }, + }) - const displayJobsTable = (item) => ( - 1 ? 800 : 400, backgroundColor: 'var(--color-bg)' }} - className="ui celled table compact" - useWindowScroll={false} - data={item.jobs.sort((a, b) => { - // Sorts an array of objects first by 'job_id' in ascending order. - if (a.job_id < b.job_id) { - return -1 - } - if (a.job_id > b.job_id) { - return 1 - } - return 0 - })} - fixedHeaderContent={() => ( - - - JOB ID - NAME - START - DURATION - COST - - )} - components={TableComponents} - /> - ) + const arRecord = queryResponse?.data?.analysisRunner - const arGuidCard = (idx, data) => ( - - + return ( + +
<> - {displayTopLevelCheckBoxRow(`row-${idx}`, `AR-GUID: ${data.total.ar_guid}`)} + + AR-GUID: {arGuid} + - {displayRow( - '', - `${idx}-detail-cost`, - 'Total cost', - formatMoney(data.total.cost, 2) - )} + + {formatMoney(data.total?.cost, 2)} + {/* cost by categories */} - {data.categories.map((tcat, cidx) => { + {data?.categories?.map((tcat, cidx) => { const workflows = tcat.workflows !== null ? ` (across ${tcat.workflows} workflows)` : '' - return displayRow( - '', - `categories-${idx}-${cidx}`, - tcat.category, - `${formatMoney(tcat.cost, 2)} ${workflows}` + return ( + + {formatMoney(tcat.cost, 2)} {workflows} + ) })} - {displayRow( - '', - `${idx}-detail-start`, - 'Start', - data.total.usage_start_time - )} - {displayRow('', `${idx}-detail-end`, 'End', data.total.usage_end_time)} + {data?.total?.usage_start_time} + {data?.total?.usage_end_time} + + {/* cost by topics */} {/* all meta if present */} - {data.analysisRunnerLog && - Object.keys(data.analysisRunnerLog.meta).map((key) => { - const mcat = data.analysisRunnerLog.meta[key] - return displayRow(`row-${idx}`, `${idx}-meta-${key}`, key, mcat) - })} + {queryResponse.loading && ( + + + + )} + {queryResponse.error && ( + + {queryResponse.error.message} + + )} + {!!arRecord && ( + <> + {/* submitingUser, repository, commit, script, description, outputPath, configPath, cwd, hailVersion */} + + + {arRecord.submittingUser} + + + {arRecord.accessLevel} + + + {arRecord.repository} + + + {arRecord.commit} + + + {arRecord.script} + + + {arRecord.description} + + + {arRecord.outputPath} + + + {arRecord.configPath} + + + {arRecord.cwd} + + {/* hail version */} + + {arRecord.hailVersion} + + + )} {/* cost by topics */} - {displayCheckBoxRow( - `row-${idx}`, - `topics-toggle-${idx}`, - `topics-${idx}`, - 'Cost By Topic' - )} - - - - - - - - Topic - Cost + Cost by Topics - {data.topics?.length || 0} topic(s) + + +
+ + + Topic + Cost + + + + {data.topics?.map((trec, tidx) => ( + + {trec.topic} + + {formatMoney(trec.cost, 2)} + - - - {data.topics.map((trec, tidx) => ( - - {trec.topic} - - {formatMoney(trec.cost, 2)} - - - ))} - - - - + ))} + +
+ {/* cost by seq groups */} - {displayCostBySeqGrpRow( - `row-${idx}`, - `seq-grp-toggle-${idx}`, - `seq-grp-${idx}`, - 'Cost By Sequencing Group', - data - )} + + + Cost by Sequencing Groups -{' '} + {data.seq_groups?.filter((s) => !!s.sequencing_group)?.length || 0}{' '} + sequencing group(s) + + + + {/* cost by SKU */} - {displayCheckBoxRow( - `row-${idx}`, - `sku-toggle-${idx}`, - `sku-${idx}`, - 'Cost By SKU' - )} - {displayCostBySkuRow( - [`row-${idx}`], - `sku-${idx}`, - 'total-donut-chart', - 600, - 1, - data - )} + + Cost by SKU + + + + - + ) +} + +const BatchCard: React.FC<{ item: AnalysisCostRecordBatch }> = ({ item }) => { + const [isOpen, setIsOpen] = React.useState(false) + + const [isSeqGroupOpen, setIsSeqGroupOpen] = React.useState(false) + const [isSkuOpen, setIsSkuOpen] = React.useState(false) + const [isJobsOpen, setIsJobsOpen] = React.useState(false) + const [jobOpenSet, setJobOpenSet] = React.useState>(new Set()) - const batchCard = (item) => ( + const isDriverBatch = item.jobs[0]?.job_name === 'driver' + + return ( - + - {displayTopLevelCheckBoxRow( - `row-${item.batch_id}`, - prepareBatchUrl(item.batch_id) + + + + {isDriverBatch && True} + {item.batch_name} + {item.jobs?.length === 1 && item.jobs[0].job_name && ( + {item.jobs[0].job_name} )} - {displayRow('', `${item.batch_id}-detail-name`, 'Batch Name', item.batch_name)} - - {item.jobs_cnt === 1 - ? displayRow( - '', - `${item.batch_id}-detail-job-name`, - 'Job Name', - item.jobs[0].job_name - ) - : null} - - {displayRow( - '', - `${item.batch_id}-detail-cost`, - 'Cost', - `${formatMoney(item.cost, 4)} ${ - item.jobs_cnt !== null ? ` (across ${item.jobs_cnt} jobs)` : '' - }` - )} + + {formatMoney(item.cost, 4)}{' '} + {item.jobs?.length > 0 && - across {item.jobs.length} job(s)} + - {displayRow( - `row-${item.batch_id}`, - `${item.batch_id}-detail-start`, - 'Start', - data.total.usage_start_time - )} - {displayRow( - `row-${item.batch_id}`, - `${item.batch_id}-detail-end`, - 'End', - data.total.usage_end_time - )} + + {item.usage_start_time} + + + {item.usage_end_time} + {/* cost by seq groups */} - {displayCostBySeqGrpRow( - `row-${item.batch_id}`, - `seq-grp-toggle-${item.batch_id}`, - `seq-grp-${item.batch_id}`, - 'Cost By Sequencing Group', - item - )} + + Cost by Sequencing Groups -{' '} + {item.seq_groups?.filter((s) => !!s.sequencing_group)?.length || 0}{' '} + sequencing group(s) + + + + {/* cost by SKU */} - {displayCheckBoxRow( - `row-${item.batch_id}`, - `sku-toggle-${item.batch_id}`, - `sku-${item.batch_id}`, - 'Cost By SKU' - )} - {displayCostBySkuRow( - [`row-${item.batch_id}`], - `sku-${item.batch_id}`, - `donut-chart-${item.batch_id}`, - 600, - 1, - item - )} + + Cost by SKU + + + + {/* cost by jobs */} {item.jobs_cnt > 1 && ( <> - {displayCheckBoxRow( - `row-${item.batch_id}`, - `jobs-toggle-${item.batch_id}`, - `jobs-${item.batch_id}`, - 'Cost By JOBS' - )} - - - - {displayJobsTable(item)} - + Cost by Jobs - {item.jobs_cnt} job(s) + + + + {/* Batch jobs table */} + )} - +
) +} - const genericCard = (item, data, label) => ( - - - {displayCommonSection(data, label, item)} - - - ) +const BatchGrid: React.FunctionComponent<{ + data: AnalysisCostRecord +}> = ({ data }) => { + const [openRows, setOpenRows] = React.useState([]) + + const handleToggle = (position: string) => { + if (!openRows.includes(position)) { + setOpenRows([...openRows, position]) + } else { + setOpenRows(openRows.filter((value) => value !== position)) + } + } + + const prepareBgColor = (log: any) => { + if (log.batch_id === undefined) { + return 'var(--color-border-color)' + } + if (log.job_id === undefined) { + return 'var(--color-border-default)' + } + return 'var(--color-bg)' + } + + const GenericCard: React.FC<{ + data: IGenericCardData + label: string + pkey: string + }> = ({ data, label, pkey }) => { + const [isOpen, setIsOpen] = React.useState(false) + const [skuIsOpen, setSkuIsOpen] = React.useState(false) + + return ( + + + + + {label} + + + + {formatMoney(data.cost, 4)}{' '} + {(data?.jobs_cnt || 0) > 0 && - across {data.jobs_cnt} job(s)} + + {data.usage_start_time} + {data.usage_end_time} + + {/* cost by SKU */} + + Cost by SKU + + + + + +
+
+ ) + } return ( <> - {arGuidCard(idx, data)} - - {data.batches.map((item) => batchCard(item))} - - {data.dataproc.map((item) => genericCard(item, item.dataproc, `DATAPROC`))} - - {data.wdl_tasks.map((item) => - genericCard(item, item.wdl_task_name, `WDL TASK NAME: ${item.wdl_task_name}`) - )} - - {data.cromwell_sub_workflows.map((item) => - genericCard( - item, - item.cromwell_sub_workflow_name, - `CROMWELL SUB WORKFLOW NAME: ${item.cromwell_sub_workflow_name}` - ) - )} - - {data.cromwell_workflows.map((item) => - genericCard( - item, - item.cromwell_workflow_id, - `CROMWELL WORKFLOW ID: ${item.cromwell_workflow_id}` - ) - )} + + + {_.orderBy(data?.batches || [], (b) => b.batch_id).map((batchRecord) => ( + + ))} + + {data.dataproc?.map((item, idx) => ( + + ))} + + {data.wdl_tasks?.map((item) => ( + + ))} + + {data.cromwell_sub_workflows?.map((item) => ( + + ))} + + {data.cromwell_workflows?.map((item) => ( + + ))} ) } diff --git a/web/src/pages/billing/components/BatchJobGrid.tsx b/web/src/pages/billing/components/BatchJobGrid.tsx new file mode 100644 index 000000000..b81e23cd6 --- /dev/null +++ b/web/src/pages/billing/components/BatchJobGrid.tsx @@ -0,0 +1,95 @@ +import React from 'react' +import * as _ from 'lodash' +import TableBody from '@mui/material/TableBody' +import TableContainer from '@mui/material/TableContainer' +import TableHead from '@mui/material/TableHead' +import TableRow from '@mui/material/TableRow' +import Paper from '@mui/material/Paper' +import { AnalysisCostRecordBatch, AnalysisCostRecordBatchJob } from '../../../sm-api' + +import Table, { CheckboxRow, DisplayRow } from '../../../shared/components/Table' +import { TableVirtuoso } from 'react-virtuoso' +import { Checkbox } from 'semantic-ui-react' +import { Table as SUITable, TableProps } from 'semantic-ui-react' +import { calcDuration, CostBySkuRow } from './BillingByAnalysisComponents' +import formatMoney from '../../../shared/utilities/formatMoney' + +const ExpandableRow: React.FC<{ item: AnalysisCostRecordBatchJob }> = ({ item, ...props }) => { + const [isOpen, setIsOpen] = React.useState(false) + + return ( + <> + + + setIsOpen(!isOpen)} /> + + {item.job_id} + {item.job_name} + {item.usage_start_time} + + {calcDuration(item.usage_start_time, item.usage_end_time)} + + {formatMoney(item.cost, 4)} + + + + + + ) +} + +const TableComponents = { + Scroller: React.forwardRef((props, ref) => ( + + )), + Table: (props) => , + TableHead: TableHead, + TableRow: ExpandableRow, + TableBody: React.forwardRef((props, ref) => ), +} + +export const BatchJobsTable: React.FC<{ batch: AnalysisCostRecordBatch }> = ({ batch }) => { + const [sortedData, setSortedData] = React.useState( + _.orderBy(batch.jobs, (j) => parseInt(j.job_id)) + ) + + React.useEffect(() => { + // sort here to avoid sorting on each render + setSortedData(_.orderBy(batch.jobs, (j) => parseInt(j.job_id))) + }, [batch]) + + return ( + 1 ? 
800 : 400, + backgroundColor: 'var(--color-bg)', + }} + className="ui celled table compact" + useWindowScroll={false} + data={sortedData} + fixedHeaderContent={() => ( + + + JOB ID + NAME + START + DURATION + COST + + )} + components={TableComponents} + /> + ) +} diff --git a/web/src/pages/billing/components/BillingByAnalysisComponents.tsx b/web/src/pages/billing/components/BillingByAnalysisComponents.tsx new file mode 100644 index 000000000..26bee7de3 --- /dev/null +++ b/web/src/pages/billing/components/BillingByAnalysisComponents.tsx @@ -0,0 +1,86 @@ +import React from 'react' + +import Table, { CheckboxRow, DisplayRow } from '../../../shared/components/Table' +import { Table as SUITable, TableProps } from 'semantic-ui-react' +import { DonutChart } from '../../../shared/components/Graphs/DonutChart' +import { AnalysisCostRecordSeqGroup, AnalysisCostRecordSku } from '../../../sm-api' +import formatMoney from '../../../shared/utilities/formatMoney' + +export const calcDuration = (start: string | number | Date, end: string | number | Date) => { + const duration = new Date(end).valueOf() - new Date(start).valueOf() + const seconds = Math.floor((duration / 1000) % 60) + const minutes = Math.floor((duration / (1000 * 60)) % 60) + const hours = Math.floor((duration / (1000 * 60 * 60)) % 24) + const formattedDuration = `${hours}h ${minutes}m ${seconds}s` + return {formattedDuration} +} + +export const CostBySkuRow: React.FC<{ + chartId?: string + chartMaxWidth?: string + colSpan: number + skus: AnalysisCostRecordSku[] +}> = ({ chartId, chartMaxWidth, colSpan, skus }) => ( + <> + {chartId && ( + ({ + label: srec.sku, + value: srec.cost, + }))} + maxSlices={skus.length} + showLegend={false} + isLoading={false} + maxWidth={chartMaxWidth} + /> + )} +
+ + + SKU + COST + + + + {skus.map((srec, sidx) => ( + + {srec.sku} + {formatMoney(srec.cost, 4)} + + ))} + +
+ +) + +export const SeqGrpDisplay: React.FC<{ seq_groups: AnalysisCostRecordSeqGroup[] }> = ({ + seq_groups, +}) => { + if (!seq_groups) { + return No sequencing groups + } + + return ( + + + + SEQ GROUP + STAGE + COST + + + + {seq_groups + .sort((a, b) => b.cost - a.cost) // Sort by cost in descending order + .map((gcat) => ( + + {gcat.sequencing_group} + {gcat.stage} + {formatMoney(gcat.cost, 4)} + + ))} + +
+ ) +} diff --git a/web/src/pages/billing/components/TableHelpers.tsx b/web/src/pages/billing/components/TableHelpers.tsx new file mode 100644 index 000000000..8d7c8d09e --- /dev/null +++ b/web/src/pages/billing/components/TableHelpers.tsx @@ -0,0 +1,83 @@ +import { range } from 'lodash' +import { Table as SUITable, Card, Checkbox } from 'semantic-ui-react' + +import { + AnalysisCostRecord, + AnalysisCostRecordTotal, + AnalysisCostRecordBatch, + AnalysisCostRecordBatchJob, + Analysis, + AnalysisCostRecordSku, + AnalysisCostRecordSeqGroup, +} from '../../../sm-api' + +interface ICheckboxRowProps { + isChecked: boolean + setIsChecked: (isChecked: boolean) => void + isVisible?: boolean + colSpan?: number + rowStyle?: React.CSSProperties + leadingCells?: number +} + +const CheckboxRow: React.FC = ({ + children, + setIsChecked, + isChecked, + isVisible, + colSpan, + rowStyle, + leadingCells, + ...props +}) => ( + + {leadingCells && + range(0, leadingCells).map((cell, idx) => ( + + ))} + {/* */} + + { + // debugger + setIsChecked(!isChecked) + }} + /> + + {children} + +) + +const DisplayRow: React.FC<{ isVisible?: boolean; label: string }> = ({ + children, + label, + isVisible = true, + ...props +}) => { + const _isVisible = isVisible === undefined ? true : isVisible + + return ( + + + + {label} + + {children} + + ) +} diff --git a/web/src/shared/components/Table.tsx b/web/src/shared/components/Table.tsx index b98d644aa..08eeeb6c1 100644 --- a/web/src/shared/components/Table.tsx +++ b/web/src/shared/components/Table.tsx @@ -2,7 +2,10 @@ // to the table if the user has dark mode enabled in their browser. import * as React from 'react' -import { Table as SUITable, TableProps } from 'semantic-ui-react' +import { range } from 'lodash' + +import { Table as SUITable, TableProps, Checkbox } from 'semantic-ui-react' + import { ThemeContext } from './ThemeProvider' const Table: React.FunctionComponent = ({ className, ...props }) => { @@ -17,4 +20,82 @@ const Table: React.FunctionComponent = ({ className, ...props }) => return } +interface ICheckboxRowProps { + isChecked: boolean + setIsChecked: (isChecked: boolean) => void + isVisible?: boolean + colSpan?: number + rowStyle?: React.CSSProperties + leadingCells?: number +} + +export const CheckboxRow: React.FC = ({ + children, + setIsChecked, + isChecked, + isVisible, + colSpan, + rowStyle, + leadingCells, + ...props +}) => ( + + {leadingCells && + range(0, leadingCells).map((cell, idx) => ( + + ))} + {/* */} + + { + // debugger + setIsChecked(!isChecked) + }} + /> + + {children} + +) +interface IDisplayRowProps { + isVisible?: boolean + label?: string + colSpan?: number +} + +export const DisplayRow: React.FC = ({ + children, + label, + colSpan, + isVisible = true, + ...props +}) => { + const _isVisible = isVisible === undefined ? true : isVisible + + return ( + + + + {label} + + {children} + + ) +} + export default Table diff --git a/web/src/shared/utilities/formatMoney.ts b/web/src/shared/utilities/formatMoney.ts index d6039aba2..34f026640 100644 --- a/web/src/shared/utilities/formatMoney.ts +++ b/web/src/shared/utilities/formatMoney.ts @@ -1,3 +1,3 @@ -const formatMoney = (val: number, dp: number = 2): string => `$${val.toFixed(dp).replace(/\d(?=(\d{3})+\.)/g, '$&,')}` +const formatMoney = (val: number | undefined, dp: number = 2): string => val ? `$${val.toFixed(dp).replace(/\d(?=(\d{3})+\.)/g, '$&,')}` : '' export default formatMoney