Skip to content

Commit

Permalink
Fixing linting issues after merging from dev.
Browse files Browse the repository at this point in the history
  • Loading branch information
milo-hyben committed Sep 18, 2023
1 parent a61b39c commit 4db8f9b
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 14 deletions.
12 changes: 6 additions & 6 deletions etl/load/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import json
import logging
import os
from typing import Any, Dict
from typing import Any, Dict, List

import flask
import functions_framework
Expand All @@ -21,12 +21,12 @@
DEFAULT_LOAD_CONFIG = os.getenv('DEFAULT_LOAD_CONFIG', '{}')


def call_parser(parser_obj, row_json):
def call_parser(parser_obj, row_json) -> tuple[str, str]:
"""
This function calls parser_obj.from_json and returns status and result
"""
tmp_res = []
tmp_status = []
tmp_res: List[str] = []
tmp_status: List[str] = []

# GenericMetadataParser from_json is async
# we call it from sync, so we need to wrap it in coroutine
Expand Down Expand Up @@ -222,7 +222,7 @@ def etl_load(request: flask.Request):
}, 500


def extract_request_id(jbody: Dict[str, Any]) -> str | None:
def extract_request_id(jbody: Dict[str, Any]) -> tuple[str | None, str | None]:
"""Unwrap request id from the payload
Args:
Expand Down Expand Up @@ -263,7 +263,7 @@ def extract_request_id(jbody: Dict[str, Any]) -> str | None:

def get_parser_instance(
parser_map: dict, sample_type: str | None, init_params: dict | None
) -> object | None:
) -> tuple[object | None, str | None]:
"""Extract parser name from sample_type
Args:
Expand Down
9 changes: 4 additions & 5 deletions metamist_infrastructure/driver.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
# pylint: disable=missing-function-docstring,import-error
"""
Make metamist architecture available to production pulumi stack
so it can be centrally deployed. Do this through a plugin, and submodule.
Expand Down Expand Up @@ -343,7 +342,7 @@ def etl_bigquery_dataset(self):
),
)

def _setup_bq_table(self, schema_file_name: str, table_name: str):
def _setup_bq_table(self, schema_file_name: Path, table_name: str):
"""Set up BigQuery table"""
with open(schema_file_name) as f:
schema = f.read()
Expand Down Expand Up @@ -498,7 +497,7 @@ def etl_load_function(self):
"""etl_load_function"""
return self._etl_function('load', self.etl_load_service_account)

def _etl_function(self, f_name: str, sa: object):
def _etl_function(self, f_name: str, sa: gcp.serviceaccount.Account):
"""
Driver function to setup the etl cloud function
"""
Expand All @@ -511,8 +510,8 @@ def _etl_function(self, f_name: str, sa: object):
str(path_to_func_folder.absolute()),
allowed_extensions=frozenset({'.gz', '.py', '.txt', '.json'}),
# TODO replace with metamist config, once it's available
private_repo_url=self.extra_sample_metadata_config['private_repo_url'],
private_repos=self.extra_sample_metadata_config['private_repos'],
private_repo_url=str(self.extra_sample_metadata_config['private_repo_url']),
private_repos=str(self.extra_sample_metadata_config['private_repos']),
)

# Create the single Cloud Storage object,
Expand Down
6 changes: 3 additions & 3 deletions metamist_infrastructure/slack_notification.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# pylint: disable=missing-function-docstring,import-error
# pylint: disable=missing-function-docstring,import-error,no-member
"""
Make metamist architecture available to production pulumi stack
so it can be centrally deployed. Do this through a plugin, and submodule.
Expand Down Expand Up @@ -56,8 +56,8 @@ def __init__(
self,
project_name: str,
location: str, # e.g. self.config.gcp.region
service_account: object,
source_bucket: object,
service_account: gcp.serviceaccount.Account,
source_bucket: gcp.storage.Bucket,
slack_secret_project_id: str,
slack_token_secret_name: str,
slack_channel_name: str,
Expand Down

0 comments on commit 4db8f9b

Please sign in to comment.