
Commit 93a8183

Address mypy errors
1 parent e94a4d8 commit 93a8183

39 files changed: +553 −498 lines changed

.pre-commit-config.yaml

+88 −84
@@ -1,92 +1,96 @@
 repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
-    hooks:
-      - id: check-yaml
-        exclude: '\.*conda/.*'
-      - id: end-of-file-fixer
-      - id: trailing-whitespace
-        exclude: '\.txt$|\.tsv$'
-      - id: check-case-conflict
-      - id: check-merge-conflict
-      - id: detect-private-key
-      - id: debug-statements
-      - id: check-added-large-files
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: check-yaml
+        exclude: '\.*conda/.*'
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+        exclude: '\.txt$|\.tsv$'
+      - id: check-case-conflict
+      - id: check-merge-conflict
+      - id: detect-private-key
+      - id: debug-statements
+      - id: check-added-large-files

-  - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.33.0
-    hooks:
-      - id: markdownlint
-        args: ["--config", ".markdownlint.json"]
+  - repo: https://github.com/igorshubovych/markdownlint-cli
+    rev: v0.33.0
+    hooks:
+      - id: markdownlint
+        args: ["--config", ".markdownlint.json"]

-  - repo: https://github.com/ambv/black
-    rev: 23.3.0
-    hooks:
-      - id: black
-        args: [.]
-        pass_filenames: false
-        always_run: true
-        exclude: ^metamist/
+  - repo: https://github.com/ambv/black
+    rev: 23.3.0
+    hooks:
+      - id: black
+        args: [.]
+        pass_filenames: false
+        always_run: true
+        exclude: ^metamist/

-  - repo: https://github.com/PyCQA/flake8
-    rev: "6.0.0"
-    hooks:
-      - id: flake8
-        additional_dependencies: [flake8-bugbear, flake8-quotes]
+  - repo: https://github.com/PyCQA/flake8
+    rev: "6.0.0"
+    hooks:
+      - id: flake8
+        additional_dependencies: [flake8-bugbear, flake8-quotes]

-  # Using system installation of pylint to support checking python module imports
-  - repo: local
-    hooks:
-      - id: pylint
-        name: pylint
-        entry: pylint
-        language: system
-        types: [python]
+  # Using system installation of pylint to support checking python module imports
+  - repo: local
+    hooks:
+      - id: pylint
+        name: pylint
+        entry: pylint
+        language: system
+        types: [python]

-  # mypy
-  - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.961
-    hooks:
-      - id: mypy
-        args:
-          [
-            --pretty,
-            --show-error-codes,
-            --no-strict-optional,
-            --ignore-missing-imports,
-            --install-types,
-            --non-interactive,
-          ]
-        additional_dependencies:
-          - strawberry-graphql[fastapi]==0.138.1
+  # mypy
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v1.5.1
+    hooks:
+      - id: mypy
+        args:
+          [
+            --pretty,
+            --show-error-codes,
+            --no-strict-optional,
+            --ignore-missing-imports,
+            --install-types,
+            --non-interactive,
+            --show-error-context,
+            --disable-error-code,
+            operator,
+          ]
+        additional_dependencies:
+          - strawberry-graphql[fastapi]==0.206.0
+          - types-PyMySQL==1.1.0.1

-  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v3.0.0-alpha.4"
-    hooks:
-      - id: prettier
-        # I'm not exactly sure why it changes behaviour, but
-        # calling `cd web`, then calling `ls src/**/*.tsx`
-        # returns different results to `cd web && ls src/**/*.tsx`
-        # so just include both patterns here
-        entry: bash -c 'cd web && prettier --write --ignore-unknown --check src/*.{ts,tsx,css} src/**/*.{ts,tsx,css}'
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: "v3.0.0-alpha.4"
+    hooks:
+      - id: prettier
+        # I'm not exactly sure why it changes behaviour, but
+        # calling `cd web`, then calling `ls src/**/*.tsx`
+        # returns different results to `cd web && ls src/**/*.tsx`
+        # so just include both patterns here
+        entry: bash -c 'cd web && prettier --write --ignore-unknown --check src/*.{ts,tsx,css} src/**/*.{ts,tsx,css}'

-  - repo: https://github.com/pre-commit/mirrors-eslint
-    rev: "v8.33.0"
-    hooks:
-      - id: eslint
-        entry: bash -c 'cd web && eslint'
-        files: \.[jt]sx?$
-        types: [file]
-        additional_dependencies:
-          - eslint@^7.32.0
-          - eslint-config-airbnb@^19.0.4
-          - eslint-config-airbnb-base@^15.0.0
-          - eslint-config-airbnb-typescript@^17.0.0
-          - eslint-config-prettier@^8.6.0
-          - eslint-plugin-import@^2.26.0
-          - eslint-plugin-jsx-a11y@^6.6.1
-          - eslint-plugin-prettier@^4.2.1
-          - eslint-plugin-react@^7.31.11
-          - eslint-plugin-react-hooks@^4.6.0
-          - "@typescript-eslint/eslint-plugin@^5.48.0"
-          - "@typescript-eslint/parser@^5.48.0"
+  - repo: https://github.com/pre-commit/mirrors-eslint
+    rev: "v8.33.0"
+    hooks:
+      - id: eslint
+        entry: bash -c 'cd web && eslint'
+        files: \.[jt]sx?$
+        types: [file]
+        additional_dependencies:
+          - eslint@^7.32.0
+          - eslint-config-airbnb@^19.0.4
+          - eslint-config-airbnb-base@^15.0.0
+          - eslint-config-airbnb-typescript@^17.0.0
+          - eslint-config-prettier@^8.6.0
+          - eslint-plugin-import@^2.26.0
+          - eslint-plugin-jsx-a11y@^6.6.1
+          - eslint-plugin-prettier@^4.2.1
+          - eslint-plugin-react@^7.31.11
+          - eslint-plugin-react-hooks@^4.6.0
+          - "@typescript-eslint/eslint-plugin@^5.48.0"
+          - "@typescript-eslint/parser@^5.48.0"

api/routes/analysis.py

+42 −44
@@ -8,12 +8,11 @@
 from pydantic import BaseModel
 from starlette.responses import StreamingResponse
 
-from api.utils.dates import parse_date_only_string
 from api.utils.db import (
-    get_projectless_db_connection,
+    Connection,
     get_project_readonly_connection,
     get_project_write_connection,
-    Connection,
+    get_projectless_db_connection,
 )
 from api.utils.export import ExportType
 from db.python.layers.analysis import AnalysisLayer

@@ -22,20 +21,17 @@
 from db.python.utils import GenericFilter
 from models.enums import AnalysisStatus
 from models.models.analysis import (
+    Analysis,
     AnalysisInternal,
     ProjectSizeModel,
-    SequencingGroupSizeModel,
-    DateSizeModel,
-    Analysis,
 )
 from models.utils.sample_id_format import (
     sample_id_transform_to_raw_list,
-    sample_id_format,
 )
 from models.utils.sequencing_group_id_format import (
+    sequencing_group_id_format,
     sequencing_group_id_format_list,
     sequencing_group_id_transform_to_raw_list,
-    sequencing_group_id_format,
 )
 
 router = APIRouter(prefix='/analysis', tags=['analysis'])

@@ -326,40 +322,42 @@ async def get_sequencing_group_file_sizes(
     """
     Get the per sample file size by type over the given projects and date range
     """
-    atable = AnalysisLayer(connection)
-
-    # Check access to projects
-    project_ids = None
-    pt = ProjectPermissionsTable(connection=connection.connection)
-    project_ids = await pt.get_project_ids_from_names_and_user(
-        connection.author, project_names, readonly=True
-    )
-
-    # Map from internal pids to project name
-    prj_name_map = dict(zip(project_ids, project_names))
-
-    # Convert dates
-    start = parse_date_only_string(start_date)
-    end = parse_date_only_string(end_date)
-
-    # Get results with internal ids as keys
-    results = await atable.get_sequencing_group_file_sizes(
-        project_ids=project_ids, start_date=start, end_date=end
-    )
-
-    # Convert to the correct output type, converting internal ids to external
-    fixed_pids: list[Any] = [
-        ProjectSizeModel(
-            project=prj_name_map[project_data['project']],
-            samples=[
-                SequencingGroupSizeModel(
-                    sample=sample_id_format(s['sample']),
-                    dates=[DateSizeModel(**d) for d in s['dates']],
-                )
-                for s in project_data['samples']
-            ],
-        )
-        for project_data in results
-    ]
 
-    return fixed_pids
+    raise NotImplementedError('This route is broken, and not properly implemented yet')
+    # atable = AnalysisLayer(connection)
+
+    # # Check access to projects
+    # project_ids = None
+    # pt = ProjectPermissionsTable(connection=connection.connection)
+    # project_ids = await pt.get_project_ids_from_names_and_user(
+    #     connection.author, project_names, readonly=True
+    # )
+
+    # # Map from internal pids to project name
+    # prj_name_map = dict(zip(project_ids, project_names))
+
+    # # Convert dates
+    # start = parse_date_only_string(start_date)
+    # end = parse_date_only_string(end_date)
+
+    # # Get results with internal ids as keys
+    # results = await atable.get_sequencing_group_file_sizes(
+    #     project_ids=project_ids, start_date=start, end_date=end
+    # )
+
+    # # Convert to the correct output type, converting internal ids to external
+    # fixed_pids: list[Any] = [
+    #     ProjectSizeModel(
+    #         project=prj_name_map[project_data['project']],
+    #         samples=[
+    #             SequencingGroupSizeModel(
+    #                 sample=sample_id_format(s['sample']),
+    #                 dates=[DateSizeModel(**d) for d in s['dates']],
+    #             )
+    #             for s in project_data['samples']
+    #         ],
+    #     )
+    #     for project_data in results
+    # ]
+
+    # return fixed_pids
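
Beyond the import reshuffle, the per-sequencing-group file-size route no longer builds the loosely typed `list[Any]` response; the old body is kept as comments and the handler now raises `NotImplementedError`, which FastAPI surfaces as a 500 until the route is reimplemented. A small self-contained sketch of that stub pattern, with hypothetical route names rather than the metamist endpoint itself:

# Hypothetical sketch of the stub pattern above, assuming FastAPI as used in this file.
from fastapi import APIRouter

router = APIRouter(prefix='/example', tags=['example'])


@router.get('/file-sizes', operation_id='getExampleFileSizes')
async def get_example_file_sizes() -> list[dict]:
    """Keep the typed signature visible to clients and mypy; fail loudly in the body."""
    raise NotImplementedError('Not implemented yet; see the commented-out body above')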

db/python/connect.py

+3 −1
@@ -121,7 +121,9 @@ def get_connection_string(self):
         if self.port:
             _host += f':{self.port}'
 
-        options = {}  # {'min_size': self.min_pool_size, 'max_size': self.max_pool_size}
+        options: dict[
+            str, str | int
+        ] = {}  # {'min_size': self.min_pool_size, 'max_size': self.max_pool_size}
         _options = '&'.join(f'{k}={v}' for k, v in options.items())
 
         url = f'mysql://{u_p}@{_host}/{self.dbname}?{_options}'
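
The only change here is an explicit annotation on the empty `options` dict. mypy cannot infer key and value types for a bare `{}` (the intended pool-size entries are still commented out), so it asks for an annotation; pinning it to `dict[str, str | int]` resolves that, and black then wraps the annotated assignment over three lines. A standalone sketch of the pattern, not the connect.py code itself:

# Standalone illustration (not the connect.py code).

# options = {}   # mypy: Need type annotation for "options"  [var-annotated]
options: dict[str, str | int] = {}  # empty for now; may later hold pool sizes etc.

options['charset'] = 'utf8mb4'  # str value fits the union
options['min_size'] = 5         # int value fits the union

print('&'.join(f'{k}={v}' for k, v in options.items()))  # charset=utf8mb4&min_size=5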

db/python/enum_tables/enums.py

+6 −4
@@ -1,6 +1,7 @@
-import re
 import abc
+import re
 from functools import lru_cache
+
 from async_lru import alru_cache
 
 from db.python.connect import DbBase

@@ -36,7 +37,8 @@ def _get_table_name(cls):
         matcher = table_name_matcher.match(tn)
         if not matcher:
             raise ValueError(
-                f'The tablename {tn} is not valid (must match {table_name_matcher.pattern})'
+                f'The tablename {tn} is not valid (must match '
+                f'{table_name_matcher.pattern})'
             )
         return tn
 
@@ -47,9 +49,9 @@ async def get(self) -> list[str]:
         """
         _query = f'SELECT DISTINCT name FROM {self._get_table_name()}'
         rows = await self.connection.fetch_all(_query)
-        rows = [r['name'] for r in rows]
+        nrows = [r['name'] for r in rows]
 
-        return rows
+        return nrows
 
     async def insert(self, value: str):
         """

db/python/layers/seqr.py

+7 −7
@@ -1,8 +1,8 @@
 # pylint: disable=unnecessary-lambda-assignment,too-many-locals,broad-exception-caught
 
+import asyncio
 import os
 import re
-import asyncio
 import traceback
 from collections import defaultdict
 from datetime import datetime

@@ -15,29 +15,29 @@
 from cpg_utils.cloud import get_google_identity_token
 
 from api.settings import (
-    SEQR_URL,
     SEQR_AUDIENCE,
     SEQR_MAP_LOCATION,
     SEQR_SLACK_NOTIFICATION_CHANNEL,
+    SEQR_URL,
     get_slack_token,
 )
 from db.python.connect import Connection
+from db.python.enum_tables import SequencingTypeTable
 from db.python.layers.analysis import AnalysisLayer
 from db.python.layers.base import BaseLayer
 from db.python.layers.family import FamilyLayer
 from db.python.layers.participant import ParticipantLayer
 from db.python.layers.sequencing_group import SequencingGroupLayer
 from db.python.tables.analysis import AnalysisFilter
 from db.python.tables.project import ProjectPermissionsTable
-from db.python.enum_tables import SequencingTypeTable
-from db.python.utils import ProjectId, GenericFilter
+from db.python.utils import GenericFilter, ProjectId
 from models.enums import AnalysisStatus
 
 # literally the most temporary thing ever, but for complete
 # automation need to have sample inclusion / exclusion
 from models.utils.sequencing_group_id_format import (
-    sequencing_group_id_format_list,
     sequencing_group_id_format,
+    sequencing_group_id_format_list,
 )
 
 SEQUENCING_GROUPS_TO_IGNORE = {22735, 22739}

@@ -421,9 +421,9 @@ async def update_es_index(
         )
 
         if len(es_index_analyses) == 0:
-            return [f'No ES index to synchronise']
+            return ['No ES index to synchronise']
 
-        with AnyPath(fn_path).open('w+') as f:
+        with AnyPath(fn_path).open('w+') as f:  # type: ignore
             f.write('\n'.join(rows_to_write))
 
         es_index = es_index_analyses[-1].output
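
Two small fixes close the file out: the placeholder-free `f'No ES index to synchronise'` becomes a plain string literal, and the `AnyPath(...).open(...)` call gains a `# type: ignore`, since mypy cannot cleanly type the `open()` call on the path object `AnyPath` dispatches to. A sketch of the same write in isolation, assuming `AnyPath` comes from cloudpathlib (its import sits outside this hunk) and using a hypothetical local path:

# Sketch only; assumes cloudpathlib's AnyPath and a hypothetical local path.
from cloudpathlib import AnyPath

rows_to_write = ['SG0001\tproject-a', 'SG0002\tproject-b']
fn_path = '/tmp/seqr_map.tsv'  # in the layer this may just as well be a cloud URL

# AnyPath() picks a local or cloud path class at runtime, which is why mypy
# needs the suppression here; narrowing the ignore to the exact error code
# mypy reports would keep unrelated problems on this line visible.
with AnyPath(fn_path).open('w+') as f:  # type: ignore
    f.write('\n'.join(rows_to_write))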
