From bcc5a1b538f8ffe28ab5ec6ed1370e55b5ed42f7 Mon Sep 17 00:00:00 2001 From: Bilal Date: Mon, 29 Jan 2024 11:42:05 +0000 Subject: [PATCH 1/6] update requirement to point to meatadata api and resolve package dependencies --- grpc_service/grpc_model.py | 7 +++++++ requirements-dev.txt | 4 ++-- requirements.txt | 10 +++++----- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/grpc_service/grpc_model.py b/grpc_service/grpc_model.py index bfe53c4b..ab341d76 100644 --- a/grpc_service/grpc_model.py +++ b/grpc_service/grpc_model.py @@ -1,11 +1,16 @@ +import logging + from ensembl.production.metadata.grpc import ensembl_metadata_pb2 +logger = logging.getLogger(__name__) + class GRPC_MODEL: def __init__(self, grpc_stub): self.grpc_stub = grpc_stub def get_genome_by_genome_uuid(self, genome_uuid, release_version=None): + logger.debug(f"Received RPC for GetGenomeByUUID with genome_uuid: '{genome_uuid}', release: {release_version}") request = ensembl_metadata_pb2.GenomeUUIDRequest( genome_uuid=genome_uuid, release_version=release_version ) @@ -13,6 +18,7 @@ def get_genome_by_genome_uuid(self, genome_uuid, release_version=None): return response def get_genome_by_keyword(self, keyword, release_version=None): + logger.debug(f"Received RPC for GetGenomesByKeyword with keyword: '{keyword}', release: {release_version}") request = ensembl_metadata_pb2.GenomeByKeywordRequest( keyword=keyword, release_version=release_version ) @@ -20,6 +26,7 @@ def get_genome_by_keyword(self, keyword, release_version=None): return response def get_genome_by_assembly_acc_id(self, assembly_accession_id): + logger.debug(f"Received RPC for GetGenomesByAssemblyAccessionID with assembly_accession_id: '{assembly_accession_id}'") request = ensembl_metadata_pb2.AssemblyAccessionIDRequest( assembly_accession=assembly_accession_id ) diff --git a/requirements-dev.txt b/requirements-dev.txt index ed57647b..3c522d20 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ -r requirements.txt -pytest==7.1.2 +pytest==7.4.4 pytest-asyncio==0.18.3 snapshottest==0.6.0 pylint==2.14.3 mypy==0.961 -black==22.6.0 +black==22.6.0 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 0b29c301..97b1d786 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,10 @@ mongomock==4.0.0 pymongo==4.1.1 -requests==2.28.0 +requests==2.31.0 aiodataloader==0.2.1 ariadne==0.19.1 -python-dotenv==0.20.0 +python-dotenv==0.19.2 uvicorn==0.18.1 -ensembl-metadata-service@git+https://github.com/Ensembl/ensembl-metadata-service.git@0.0.6a7 -grpcio==1.38.1 -grpcio-tools==1.38.1 +ensembl-metadata-api@git+https://github.com/Ensembl/ensembl-metadata-api.git@2.0.0.dev2 +grpcio==1.60.0 +grpcio-tools==1.60.0 \ No newline at end of file From 1076ccd160dfa65e0ac8c7cd4417040dd4887a7c Mon Sep 17 00:00:00 2001 From: Bilal Date: Mon, 29 Jan 2024 15:05:32 +0000 Subject: [PATCH 2/6] print the metadata api version used in graphql extension response object --- common/extensions.py | 7 ++++++- common/utils.py | 15 +++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/common/extensions.py b/common/extensions.py index 764b1316..64bb4f5b 100644 --- a/common/extensions.py +++ b/common/extensions.py @@ -16,6 +16,8 @@ from ariadne.types import Extension, ContextValue +from common import utils + class QueryExecutionTimeExtension(Extension): def __init__(self): @@ -29,4 +31,7 @@ def format(self, context): exec_time_in_secs = round( (time.perf_counter_ns() - self.start_timestamp) / 1000000000, 2 ) - return 
{"execution_time_in_seconds": exec_time_in_secs} + return { + "execution_time_in_seconds": exec_time_in_secs, + "metadata_api_version": utils.get_ensembl_metadata_api_version() + } diff --git a/common/utils.py b/common/utils.py index 7d36fa3d..4c0029aa 100644 --- a/common/utils.py +++ b/common/utils.py @@ -27,3 +27,18 @@ def check_config_validity(config): for mandatory_field in MANDATORY_FIELDS: if not config.get(mandatory_field): raise KeyError(f"Missing information in configuration file - '{mandatory_field}'") + + +def get_ensembl_metadata_api_version(): + """ + Get the Metadata API tag from requirement.txt file + """ + version = "unknown" # default version + with open('requirements.txt', 'r') as file: + lines = file.readlines() + for line in lines: + if 'ensembl-metadata-api' in line: + # Extract the tag part from the line + version = line.strip().split('@')[-1] + break + return version From 99b5809d03f87f1dad9ee50933daf3395491b06d Mon Sep 17 00:00:00 2001 From: Bilal Date: Tue, 30 Jan 2024 11:50:32 +0000 Subject: [PATCH 3/6] fix some of pylints warnings for CI CD to pass --- .gitlab-ci.yml | 3 +- .pylintrc | 2 + common/crossrefs.py | 8 +- common/extensions.py | 1 + common/utils.py | 6 +- graphql_service/resolver/gene_model.py | 20 ++--- .../resolver/tests/test_resolvers.py | 82 +++++++++---------- graphql_service/server.py | 22 ++--- graphql_service/tests/snapshot_utils.py | 8 +- .../tests/test_genome_retrieval.py | 1 - grpc_service/grpc_model.py | 8 +- 11 files changed, 77 insertions(+), 84 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9c411b4c..c1045fc0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -62,6 +62,7 @@ build: only: - develop - main + - hotfix/improve_pylint # Template to deploy application to web's k8s cluster .deploy-web: @@ -75,7 +76,7 @@ build: variables: BASE: k8s/web-prod/overlays/apps rules: - - if: '$CI_DEPLOY_FREEZE == null && $CI_COMMIT_BRANCH == "develop" && $CI_PROJECT_NAMESPACE== "ensembl-apps"' + - if: '$CI_DEPLOY_FREEZE == null && $CI_COMMIT_BRANCH == "hotfix/improve_pylint" && $CI_PROJECT_NAMESPACE== "ensembl-apps"' # deploy to staging at WP-HX staging:wp-hx: diff --git a/.pylintrc b/.pylintrc index a73e7eea..30019d8c 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,5 +1,7 @@ [MASTER] init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" max-line-length=120 +ignore-paths=^graphql_service/tests/snapshots/.*$, + ^graphql_service/tests/fixtures/.*$ disable=missing-class-docstring, missing-function-docstring, line-too-long diff --git a/common/crossrefs.py b/common/crossrefs.py index ed37ff5f..1765fe09 100644 --- a/common/crossrefs.py +++ b/common/crossrefs.py @@ -107,8 +107,8 @@ def _index_identifiers_org_data(self): Provide prefix-based indexes for the flat list of entities from the identifiers.org api """ - for ns in self.identifiers_org_data["payload"]["namespaces"]: - self.id_org_indexed[ns["prefix"]] = ns + for namespace in self.identifiers_org_data["payload"]["namespaces"]: + self.id_org_indexed[namespace["prefix"]] = namespace def generate_url_from_id_org_data(self, xref_acc_id, id_org_ns_prefix): """ @@ -204,9 +204,9 @@ def find_url_using_ens_xref_db_name(self, xref_acc_id, xref_db_name): return xref_base + xref_acc_id # Now get the URL from identifiers.org using the id_org_ns_prefix and xref_id - URL = self.generate_url_from_id_org_data(xref_acc_id, id_org_ns_prefix) + url = self.generate_url_from_id_org_data(xref_acc_id, id_org_ns_prefix) - return URL + return url def 
annotate_crossref(self, xref): """ diff --git a/common/extensions.py b/common/extensions.py index 64bb4f5b..f0a4dbc0 100644 --- a/common/extensions.py +++ b/common/extensions.py @@ -35,3 +35,4 @@ def format(self, context): "execution_time_in_seconds": exec_time_in_secs, "metadata_api_version": utils.get_ensembl_metadata_api_version() } + return None diff --git a/common/utils.py b/common/utils.py index 4c0029aa..019f49d4 100644 --- a/common/utils.py +++ b/common/utils.py @@ -13,7 +13,7 @@ """ def check_config_validity(config): - MANDATORY_FIELDS = [ + mandatory_fields = [ "mongo_host", "mongo_port", "mongo_user", @@ -24,7 +24,7 @@ def check_config_validity(config): "grpc_host", "grpc_port", ] - for mandatory_field in MANDATORY_FIELDS: + for mandatory_field in mandatory_fields: if not config.get(mandatory_field): raise KeyError(f"Missing information in configuration file - '{mandatory_field}'") @@ -34,7 +34,7 @@ def get_ensembl_metadata_api_version(): Get the Metadata API tag from requirement.txt file """ version = "unknown" # default version - with open('requirements.txt', 'r') as file: + with open('requirements.txt', 'r', encoding='UTF-8') as file: lines = file.readlines() for line in lines: if 'ensembl-metadata-api' in line: diff --git a/graphql_service/resolver/gene_model.py b/graphql_service/resolver/gene_model.py index 480df7c6..209dbc97 100644 --- a/graphql_service/resolver/gene_model.py +++ b/graphql_service/resolver/gene_model.py @@ -17,6 +17,7 @@ from ariadne import QueryType, ObjectType from graphql import GraphQLResolveInfo +from pymongo.collection import Collection from graphql_service.resolver.data_loaders import BatchLoaders @@ -39,7 +40,6 @@ MissingArgumentException, ) -from pymongo.collection import Collection # Define Query types for GraphQL # Don't forget to import these into ariadne_app.py if you add a new type @@ -62,7 +62,7 @@ def resolve_gene( _, info: GraphQLResolveInfo, - byId: Optional[Dict[str, str]] = None, + byId: Optional[Dict[str, str]] = None, # pylint: disable=invalid-name by_id: Optional[Dict[str, str]] = None, ) -> Dict: "Load Gene via stable_id" @@ -180,9 +180,9 @@ def insert_gene_name_urls(gene_metadata: Dict, info: GraphQLResolveInfo) -> Dict def resolve_transcript( _, info: GraphQLResolveInfo, - bySymbol: Optional[Dict[str, str]] = None, + bySymbol: Optional[Dict[str, str]] = None, # pylint: disable=invalid-name by_symbol: Optional[Dict[str, str]] = None, - byId: Optional[Dict[str, str]] = None, + byId: Optional[Dict[str, str]] = None, # pylint: disable=invalid-name by_id: Optional[Dict[str, str]] = None, ) -> Dict: "Load Transcripts by symbol or stable_id" @@ -243,8 +243,8 @@ def resolve_api( try: version_details = get_version_details() return {"api": version_details} - except Exception as e: - logging.error(f"Error resolving API version: {e}") + except Exception as exp: + logging.error(f"Error resolving API version: {exp}") raise @@ -348,8 +348,8 @@ async def resolve_transcript_gene(transcript: Dict, info: GraphQLResolveInfo) -> def resolve_overlap( _, info: GraphQLResolveInfo, - genomeId: Optional[str] = None, - regionName: Optional[str] = None, + genomeId: Optional[str] = None, # pylint: disable=invalid-name + regionName: Optional[str] = None, # pylint: disable=invalid-name start: Optional[int] = None, end: Optional[int] = None, by_slice: Optional[Dict[str, Any]] = None, @@ -715,8 +715,8 @@ def get_version_details() -> Dict[str, str]: logging.error("Version config file not found. 
Using default values.") except KeyError: logging.error("Version section or keys not found in INI file. Using default values.") - except Exception as e: - logging.error(f"Error reading INI file: {e}. Using default values.") + except Exception as exp: + logging.error(f"Error reading INI file: {exp}. Using default values.") return {"major": "0", "minor": "1", "patch": "0-beta"} diff --git a/graphql_service/resolver/tests/test_resolvers.py b/graphql_service/resolver/tests/test_resolvers.py index c2baeddc..e88c1bd0 100644 --- a/graphql_service/resolver/tests/test_resolvers.py +++ b/graphql_service/resolver/tests/test_resolvers.py @@ -14,16 +14,12 @@ from unittest.mock import Mock import pytest -import mongomock from starlette.datastructures import State import graphql_service.resolver.gene_model as model -from common.crossrefs import XrefResolver -from graphql_service.resolver.data_loaders import BatchLoaders -from common.db import FakeMongoDbClient +from common import crossrefs, db - -def create_GraphQLResolveInfo(database_client): +def create_graphql_resolve_info(database_client): """ Factory for creating the mock Info objects produced by graphql """ @@ -35,14 +31,14 @@ def create_GraphQLResolveInfo(database_client): info.context = { "stuff": "Nonsense", "mongo_db_client": database_client, - "XrefResolver": XrefResolver(from_file="common/tests/mini_identifiers.json"), + "XrefResolver": crossrefs.XrefResolver(from_file="common/tests/mini_identifiers.json"), "request": request_mock, } return info def prepare_mongo_instance(): - mongo_client = FakeMongoDbClient() + mongo_client = db.FakeMongoDbClient() database = mongo_client.mongo_db collection = database.create_collection('uuid_to_collection_mapping') collection.insert_many( @@ -336,7 +332,7 @@ def fixture_genome_data(): def test_resolve_gene(basic_data): "Test the querying of Mongo by gene symbol" - info = create_GraphQLResolveInfo(basic_data) + info = create_graphql_resolve_info(basic_data) # Check we can resolve using byId camelCase result = model.resolve_gene( @@ -376,7 +372,7 @@ def test_resolve_gene(basic_data): def test_resolve_gene_by_symbol(basic_data): "Test querying by gene symbol which can be ambiguous" - info = create_GraphQLResolveInfo(basic_data) + info = create_graphql_resolve_info(basic_data) # Check we can resolve using by_symbol result = model.resolve_genes( @@ -403,7 +399,7 @@ def test_resolve_gene_by_symbol(basic_data): def test_resolve_transcript_by_id(transcript_data): "Test fetching of transcripts by stable ID" - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) result = model.resolve_transcript( None, info, byId={"stable_id": "ENST001.1", "genome_id": "1"} ) @@ -414,7 +410,7 @@ def test_resolve_transcript_by_id(transcript_data): def test_resolve_transcript_by_id_not_found(transcript_data): result = None - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) with pytest.raises(model.TranscriptNotFoundError) as transcript_not_found_error: result = model.resolve_transcript( None, info, byId={"stable_id": "FAKEYFAKEYFAKEY", "genome_id": "1"} @@ -432,7 +428,7 @@ def test_resolve_transcript_by_id_not_found(transcript_data): def test_resolve_transcript_by_symbol(transcript_data): "Test fetching of transcripts by symbol" - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) result = model.resolve_transcript( None, info, bySymbol={"symbol": "kumquat", "genome_id": "1"} ) @@ 
-440,7 +436,7 @@ def test_resolve_transcript_by_symbol(transcript_data): def test_resolve_transcript_by_symbol_not_found(transcript_data): - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) with pytest.raises(model.TranscriptNotFoundError) as transcript_not_found_error: model.resolve_transcript( None, @@ -463,7 +459,7 @@ def test_resolve_transcript_by_symbol_not_found(transcript_data): async def test_resolve_gene_transcripts(transcript_data): "Check the DataLoader for transcripts is working via gene. Requires event loop for DataLoader" - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -482,7 +478,7 @@ async def test_resolve_gene_transcripts(transcript_data): async def test_resolve_gene_from_transcript(transcript_data): "Check the DataLoader for gene is working via transcript. Requires event loop for DataLoader" - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -499,7 +495,7 @@ async def test_resolve_gene_from_transcript(transcript_data): def test_resolve_overlap(slice_data): "Check features can be found via coordinates" - info = create_GraphQLResolveInfo(slice_data) + info = create_graphql_resolve_info(slice_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "test_genome_id") @@ -535,7 +531,7 @@ def test_resolve_overlap(slice_data): @pytest.mark.parametrize("start,end,expected_ids", query_region_expectations) def test_overlap_region(start, end, expected_ids, slice_data): - info = create_GraphQLResolveInfo(slice_data) + info = create_graphql_resolve_info(slice_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "test_genome_id") @@ -554,7 +550,7 @@ def test_overlap_region(start, end, expected_ids, slice_data): def test_overlap_region_too_many_results(slice_data): - info = create_GraphQLResolveInfo(slice_data) + info = create_graphql_resolve_info(slice_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "test_genome_id") @@ -588,7 +584,7 @@ async def test_resolve_region_happy_case(region_data): "strand": {"code": "forward", "value": 1}, "default": True, } - info = create_GraphQLResolveInfo(region_data) + info = create_graphql_resolve_info(region_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "plasmodium_falciparum_GCA_000002765_2") @@ -599,7 +595,7 @@ async def test_resolve_region_happy_case(region_data): @pytest.mark.asyncio async def test_resolve_region_region_not_exist(region_data): - info = create_GraphQLResolveInfo(region_data) + info = create_graphql_resolve_info(region_data) slc = { "region_id": "some_non_existing_region_id", } @@ -627,7 +623,7 @@ def test_url_generation(basic_data): }, } - info = create_GraphQLResolveInfo(basic_data) + info = create_graphql_resolve_info(basic_data) result = model.insert_crossref_urls({"external_references": [xref]}, info) for key, value in xref.items(): @@ -648,7 +644,7 @@ def test_url_generation(basic_data): async def test_resolve_transcript_products(transcript_data): "Check the DataLoader for products is working via transcript. 
Requires event loop for DataLoader" - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -673,7 +669,7 @@ async def test_resolve_transcript_products_product_not_exists(transcript_data): "genome_id": "1", "product_foreign_key": "adsfadsfa", } - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -689,7 +685,7 @@ async def test_resolve_transcript_products_product_not_exists(transcript_data): async def test_resolve_nested_products(transcript_data): "Test products inside transcripts inside the gene" - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) gene_result = model.resolve_gene( None, info, byId={"genome_id": "1", "stable_id": "ENSG001.1"} ) @@ -706,7 +702,7 @@ async def test_resolve_nested_products(transcript_data): @pytest.mark.asyncio async def test_resolve_assembly_from_region(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -726,7 +722,7 @@ async def test_resolve_assembly_from_region(genome_data): @pytest.mark.asyncio async def test_resolve_assembly_from_region_not_exists(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -747,7 +743,7 @@ async def test_resolve_assembly_from_region_not_exists(genome_data): @pytest.mark.asyncio async def test_resolve_regions_from_assembly(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -776,7 +772,7 @@ async def test_resolve_regions_from_assembly(genome_data): @pytest.mark.asyncio async def test_resolve_regions_from_assembly_not_exists(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -795,7 +791,7 @@ async def test_resolve_regions_from_assembly_not_exists(genome_data): @pytest.mark.asyncio async def test_resolve_organism_from_assembly(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -814,7 +810,7 @@ async def test_resolve_organism_from_assembly(genome_data): @pytest.mark.asyncio async def test_resolve_organism_from_assembly_not_exists(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -830,7 +826,7 @@ async def test_resolve_organism_from_assembly_not_exists(genome_data): @pytest.mark.asyncio async def test_resolve_assemblies_from_organism(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the 
collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -862,7 +858,7 @@ async def test_resolve_assemblies_from_organism(genome_data): @pytest.mark.asyncio async def test_resolve_assemblies_from_organism_not_exists(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -883,7 +879,7 @@ async def test_resolve_assemblies_from_organism_not_exists(genome_data): @pytest.mark.asyncio async def test_resolve_species_from_organism(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -906,7 +902,7 @@ async def test_resolve_species_from_organism(genome_data): @pytest.mark.asyncio async def test_resolve_species_from_organism_not_exists(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -926,7 +922,7 @@ async def test_resolve_species_from_organism_not_exists(genome_data): @pytest.mark.asyncio async def test_resolve_organisms_from_species(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -957,7 +953,7 @@ async def test_resolve_organisms_from_species(genome_data): @pytest.mark.asyncio async def test_resolve_organisms_from_species_not_exists(genome_data): - info = create_GraphQLResolveInfo(genome_data) + info = create_graphql_resolve_info(genome_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -976,7 +972,7 @@ async def test_resolve_organisms_from_species_not_exists(genome_data): @pytest.mark.asyncio async def test_resolve_region(region_data): - info = create_GraphQLResolveInfo(region_data) + info = create_graphql_resolve_info(region_data) result = await model.resolve_region( None, @@ -993,7 +989,7 @@ async def test_resolve_region(region_data): @pytest.mark.asyncio async def test_resolve_region_no_results(region_data): - info = create_GraphQLResolveInfo(region_data) + info = create_graphql_resolve_info(region_data) result = None with pytest.raises(model.RegionNotFoundError) as region_not_found_error: @@ -1024,7 +1020,7 @@ async def test_resolve_gene_transcripts_page(): @pytest.mark.asyncio async def test_resolve_transcripts_page_transcripts(transcript_data): - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -1047,7 +1043,7 @@ async def test_resolve_transcripts_page_transcripts(transcript_data): @pytest.mark.asyncio async def test_resolve_transcripts_page_transcripts_no_transcripts(transcript_data): - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -1059,7 +1055,7 @@ async def test_resolve_transcripts_page_transcripts_no_transcripts(transcript_da @pytest.mark.asyncio async def 
test_resolve_transcripts_page_metadata(transcript_data): - info = create_GraphQLResolveInfo(transcript_data) + info = create_graphql_resolve_info(transcript_data) # Finding the collection here as we are not using the base resolver model.set_col_conn_for_uuid(info, "1") @@ -1071,7 +1067,7 @@ async def test_resolve_transcripts_page_metadata(transcript_data): def test_collection_lookup_service(basic_data): - info = create_GraphQLResolveInfo(basic_data) + info = create_graphql_resolve_info(basic_data) result1 = model.resolve_gene( None, info, by_id={"stable_id": "ENSG001.1", "genome_id": "1"} diff --git a/graphql_service/server.py b/graphql_service/server.py index e7f06bff..39c1c424 100644 --- a/graphql_service/server.py +++ b/graphql_service/server.py @@ -22,21 +22,17 @@ from ariadne.explorer.template import read_template from ariadne.types import ExtensionList from pymongo import monitoring -from starlette.applications import Starlette -from starlette.middleware import Middleware +from starlette import applications, middleware from starlette.middleware.cors import CORSMiddleware -from common.logger import CommandLogger -from common.crossrefs import XrefResolver -from common import db -from common.utils import check_config_validity +from common import crossrefs, db, extensions, utils from grpc_service import grpc_model -from common.extensions import QueryExecutionTimeExtension from graphql_service.ariadne_app import ( prepare_executable_schema, prepare_context_provider, ) from dotenv import load_dotenv +from common.logger import CommandLogger load_dotenv("connections.conf") @@ -47,7 +43,7 @@ ] = None # mypy will throw an incompatible type error without this type cast # Including the execution time in the response -EXTENSIONS = [QueryExecutionTimeExtension] +EXTENSIONS = [extensions.QueryExecutionTimeExtension] if DEBUG_MODE: log = logging.getLogger() @@ -61,7 +57,7 @@ EXTENSIONS.append(ApolloTracingExtension) -check_config_validity(os.environ) +utils.check_config_validity(os.environ) MONGO_DB_CLIENT = db.MongoDbClient(os.environ) GRPC_SERVER = db.GRPCServiceClient(os.environ) @@ -70,7 +66,7 @@ EXECUTABLE_SCHEMA = prepare_executable_schema() -RESOLVER = XrefResolver(internal_mapping_file="docs/xref_LOD_mapping.json") +RESOLVER = crossrefs.XrefResolver(internal_mapping_file="docs/xref_LOD_mapping.json") CONTEXT_PROVIDER = prepare_context_provider( { @@ -81,7 +77,7 @@ ) starlette_middleware = [ - Middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["GET", "POST"]) + middleware.Middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["GET", "POST"]) ] # The original HTML file can be found under @@ -154,7 +150,7 @@ def __init__( explorer_plugin: bool = True, default_query: str = DEFAULT_QUERY, ): - super(CustomExplorerGraphiQL, self).__init__() + super().__init__() self.parsed_html = render_template( CUSTOM_GRAPHIQL_HTML, { @@ -165,7 +161,7 @@ def __init__( ) -APP = Starlette(debug=DEBUG_MODE, middleware=starlette_middleware) +APP = applications.Starlette(debug=DEBUG_MODE, middleware=starlette_middleware) APP.mount( "/", GraphQL( diff --git a/graphql_service/tests/snapshot_utils.py b/graphql_service/tests/snapshot_utils.py index af4af877..6cc849ed 100644 --- a/graphql_service/tests/snapshot_utils.py +++ b/graphql_service/tests/snapshot_utils.py @@ -11,10 +11,9 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from common.crossrefs import XrefResolver +from common import crossrefs, db from graphql_service.ariadne_app import prepare_executable_schema -from graphql_service.resolver.data_loaders import BatchLoaders from graphql_service.tests.fixtures.human_brca2 import ( build_gene, build_transcripts, @@ -25,11 +24,10 @@ build_species, ) from graphql_service.tests.fixtures.wheat import build_wheat_genes -from common.db import FakeMongoDbClient def prepare_mongo_instance(): - mongo_client = FakeMongoDbClient() + mongo_client = db.FakeMongoDbClient() database = mongo_client.mongo_db collection1 = database.create_collection('uuid_to_collection_mapping') collection1.insert_many( @@ -85,7 +83,7 @@ def setup_test(): executable_schema = prepare_executable_schema() mongo_client = prepare_mongo_instance() - xref = XrefResolver(internal_mapping_file="docs/xref_LOD_mapping.json") + xref = crossrefs.XrefResolver(internal_mapping_file="docs/xref_LOD_mapping.json") context = prepare_context_provider(mongo_client, xref) return executable_schema, context diff --git a/graphql_service/tests/test_genome_retrieval.py b/graphql_service/tests/test_genome_retrieval.py index e95365d1..e74bfdd1 100644 --- a/graphql_service/tests/test_genome_retrieval.py +++ b/graphql_service/tests/test_genome_retrieval.py @@ -15,7 +15,6 @@ import pytest from ariadne import graphql -from graphql_service.resolver.data_loaders import BatchLoaders from .snapshot_utils import setup_test executable_schema, context = setup_test() diff --git a/grpc_service/grpc_model.py b/grpc_service/grpc_model.py index ab341d76..57b3ad92 100644 --- a/grpc_service/grpc_model.py +++ b/grpc_service/grpc_model.py @@ -5,12 +5,12 @@ logger = logging.getLogger(__name__) -class GRPC_MODEL: +class GRPC_MODEL: # pylint: disable=invalid-name def __init__(self, grpc_stub): self.grpc_stub = grpc_stub def get_genome_by_genome_uuid(self, genome_uuid, release_version=None): - logger.debug(f"Received RPC for GetGenomeByUUID with genome_uuid: '{genome_uuid}', release: {release_version}") + logger.debug("Received RPC for GetGenomeByUUID with genome_uuid: '%s', release: %s", genome_uuid, release_version) request = ensembl_metadata_pb2.GenomeUUIDRequest( genome_uuid=genome_uuid, release_version=release_version ) @@ -18,7 +18,7 @@ def get_genome_by_genome_uuid(self, genome_uuid, release_version=None): return response def get_genome_by_keyword(self, keyword, release_version=None): - logger.debug(f"Received RPC for GetGenomesByKeyword with keyword: '{keyword}', release: {release_version}") + logger.debug("Received RPC for GetGenomesByKeyword with keyword: '%s', release: %s", keyword, release_version) request = ensembl_metadata_pb2.GenomeByKeywordRequest( keyword=keyword, release_version=release_version ) @@ -26,7 +26,7 @@ def get_genome_by_keyword(self, keyword, release_version=None): return response def get_genome_by_assembly_acc_id(self, assembly_accession_id): - logger.debug(f"Received RPC for GetGenomesByAssemblyAccessionID with assembly_accession_id: '{assembly_accession_id}'") + logger.debug("Received RPC for GetGenomesByAssemblyAccessionID with assembly_accession_id: '%s'", assembly_accession_id) request = ensembl_metadata_pb2.AssemblyAccessionIDRequest( assembly_accession=assembly_accession_id ) From 26daab2768ac49becff7786b7faac633a20dada1 Mon Sep 17 00:00:00 2001 From: Bilal Date: Tue, 30 Jan 2024 12:13:01 +0000 Subject: [PATCH 4/6] improve github actions --- .github/workflows/pytest.yml | 21 ++++++++++++++++----- .gitlab-ci.yml | 3 +-- 2 files changed, 17 insertions(+), 
7 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index d48dad4a..c213f531 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -1,25 +1,31 @@ # Workflow name -name: PyTest and Black +name: PyTest, Black and Pylint # Controls when the workflow will run on: - # Triggers the workflow on pull request (on main only) events + # Triggers the workflow on pull request (on main and develop only) events pull_request: branches: - main + - develop # Allows you to run this workflow manually from the Actions tab workflow_dispatch: # A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: - # This workflow contains a single job called "test" - test: + # This workflow contains a single job called "tests" + tests: # The type of runner that the job will run on and timeout in minutes name: Run Python Tests and Black formatter runs-on: ubuntu-latest timeout-minutes: 10 + # Include a strategy matrix in order to allow the job to run multiple times with different versions of Python + strategy: + matrix: + python-version: [3.8, 3.9] + # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out our repository under $GITHUB_WORKSPACE, so our job can access it @@ -47,4 +53,9 @@ jobs: # Check code has been formatted - name: Run Black code formatter run: | - black . --check --verbose --diff --color \ No newline at end of file + black . --check --verbose --diff --color + + # Run Pylint + - name: Run Pylint + run: | + pylint $(git ls-files '*.py') --fail-under=9.5 diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c1045fc0..9c411b4c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -62,7 +62,6 @@ build: only: - develop - main - - hotfix/improve_pylint # Template to deploy application to web's k8s cluster .deploy-web: @@ -76,7 +75,7 @@ build: variables: BASE: k8s/web-prod/overlays/apps rules: - - if: '$CI_DEPLOY_FREEZE == null && $CI_COMMIT_BRANCH == "hotfix/improve_pylint" && $CI_PROJECT_NAMESPACE== "ensembl-apps"' + - if: '$CI_DEPLOY_FREEZE == null && $CI_COMMIT_BRANCH == "develop" && $CI_PROJECT_NAMESPACE== "ensembl-apps"' # deploy to staging at WP-HX staging:wp-hx: From c3dae3eb6e659eb3bf51d10c14986bfaa2622f7e Mon Sep 17 00:00:00 2001 From: Bilal Date: Tue, 30 Jan 2024 13:05:42 +0000 Subject: [PATCH 5/6] update python version in guthub action --- .github/workflows/{pytest.yml => tests.yml} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename .github/workflows/{pytest.yml => tests.yml} (92%) diff --git a/.github/workflows/pytest.yml b/.github/workflows/tests.yml similarity index 92% rename from .github/workflows/pytest.yml rename to .github/workflows/tests.yml index c213f531..cd0fdb36 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/tests.yml @@ -32,11 +32,11 @@ jobs: - name: Check out repository code uses: actions/checkout@v3 - # Set up Python version - - name: Set up Python 3.7 + # Set up Python version from the matrix + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: - python-version: '3.7' + python-version: ${{ matrix.python-version }} # Runs a set of commands installing Python dependencies using the runners shell (Run a multi-line script) - name: Install Python dependencies From 08ee5403954a5e02bc3e9cd7e2e0a79fa39f5923 Mon Sep 17 00:00:00 2001 From: Bilal Date: Tue, 30 Jan 2024 13:09:43 +0000 Subject: [PATCH 6/6] Black it (I hate you black...) 
--- .github/workflows/tests.yml | 2 +- common/db.py | 13 +++++-- common/extensions.py | 2 +- common/utils.py | 11 ++++-- graphql_service/resolver/gene_model.py | 26 ++++++------- .../resolver/tests/test_resolvers.py | 37 +++++++++++-------- graphql_service/server.py | 4 +- graphql_service/tests/snapshot_utils.py | 17 ++++----- .../snapshots/snap_test_version_retrieval.py | 10 +---- graphql_service/tests/test_gene_retrieval.py | 1 + .../tests/test_transcript_retrieval.py | 4 +- grpc_service/grpc_model.py | 17 +++++++-- 12 files changed, 81 insertions(+), 63 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cd0fdb36..f21e7573 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -17,7 +17,7 @@ jobs: # This workflow contains a single job called "tests" tests: # The type of runner that the job will run on and timeout in minutes - name: Run Python Tests and Black formatter + name: Run Python Tests, Black formatter and Pylint runs-on: ubuntu-latest timeout-minutes: 10 diff --git a/common/db.py b/common/db.py index a2571abc..89537462 100644 --- a/common/db.py +++ b/common/db.py @@ -45,7 +45,9 @@ def get_collection_conn(self, uuid): # Fallback to the collection in the configuration file if no data collection is found for the given UUID if not data_collection: data_collection_name = self.config.get("mongo_default_collection") - print(f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID") + print( + f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID" + ) else: data_collection_name = data_collection.get("collection") print(f"Using '{data_collection_name}' collection for '{uuid}' UUID") @@ -85,20 +87,23 @@ class FakeMongoDbClient: """ Sets up a mongomock collection for thoas code to test with """ + def __init__(self): mongo_client = mongomock.MongoClient() self.mongo_db = mongo_client.db def get_collection_conn(self, uuid): - lookup_service_collection = 'uuid_to_collection_mapping' + lookup_service_collection = "uuid_to_collection_mapping" query = {"uuid": uuid, "is_current": True} data_collection = self.mongo_db[lookup_service_collection].find_one(query) # Fallback to the default collection if no collection found in the mappings # for the given UUID if not data_collection: - data_collection_name = 'collection1' - print(f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID") + data_collection_name = "collection1" + print( + f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID" + ) else: data_collection_name = data_collection.get("collection") print(f"Using '{data_collection_name}' collection for '{uuid}' UUID") diff --git a/common/extensions.py b/common/extensions.py index f0a4dbc0..bb2daec5 100644 --- a/common/extensions.py +++ b/common/extensions.py @@ -33,6 +33,6 @@ def format(self, context): ) return { "execution_time_in_seconds": exec_time_in_secs, - "metadata_api_version": utils.get_ensembl_metadata_api_version() + "metadata_api_version": utils.get_ensembl_metadata_api_version(), } return None diff --git a/common/utils.py b/common/utils.py index 019f49d4..951c5b35 100644 --- a/common/utils.py +++ b/common/utils.py @@ -12,6 +12,7 @@ limitations under the License. 
""" + def check_config_validity(config): mandatory_fields = [ "mongo_host", @@ -26,7 +27,9 @@ def check_config_validity(config): ] for mandatory_field in mandatory_fields: if not config.get(mandatory_field): - raise KeyError(f"Missing information in configuration file - '{mandatory_field}'") + raise KeyError( + f"Missing information in configuration file - '{mandatory_field}'" + ) def get_ensembl_metadata_api_version(): @@ -34,11 +37,11 @@ def get_ensembl_metadata_api_version(): Get the Metadata API tag from requirement.txt file """ version = "unknown" # default version - with open('requirements.txt', 'r', encoding='UTF-8') as file: + with open("requirements.txt", "r", encoding="UTF-8") as file: lines = file.readlines() for line in lines: - if 'ensembl-metadata-api' in line: + if "ensembl-metadata-api" in line: # Extract the tag part from the line - version = line.strip().split('@')[-1] + version = line.strip().split("@")[-1] break return version diff --git a/graphql_service/resolver/gene_model.py b/graphql_service/resolver/gene_model.py index 209dbc97..e2c04aa7 100644 --- a/graphql_service/resolver/gene_model.py +++ b/graphql_service/resolver/gene_model.py @@ -95,7 +95,6 @@ def resolve_gene( if not result: raise GeneNotFoundError(by_id=by_id) - return result @@ -187,7 +186,6 @@ def resolve_transcript( ) -> Dict: "Load Transcripts by symbol or stable_id" - if by_symbol is None: by_symbol = bySymbol if by_id is None: @@ -234,7 +232,8 @@ def resolve_transcript( @QUERY_TYPE.field("version") def resolve_api( - _: None, info: GraphQLResolveInfo # the second argument must be named `info` to avoid a NameError + _: None, + info: GraphQLResolveInfo, # the second argument must be named `info` to avoid a NameError ) -> Dict[str, Dict[str, str]]: """ Resolve the API version. @@ -702,19 +701,21 @@ def get_version_details() -> Dict[str, str]: config = configparser.ConfigParser() try: - if not config.read('version_config.ini'): + if not config.read("version_config.ini"): raise FileNotFoundError("INI file not found.") - version_data = config['version'] + version_data = config["version"] return { - "major": version_data['major'], - "minor": version_data['minor'], - "patch": version_data['patch'] + "major": version_data["major"], + "minor": version_data["minor"], + "patch": version_data["patch"], } except FileNotFoundError: logging.error("Version config file not found. Using default values.") except KeyError: - logging.error("Version section or keys not found in INI file. Using default values.") + logging.error( + "Version section or keys not found in INI file. Using default values." + ) except Exception as exp: logging.error(f"Error reading INI file: {exp}. 
Using default values.") @@ -750,8 +751,7 @@ def set_col_conn_for_uuid(info, uuid): col_conn = info.context["mongo_db_client"].get_collection_conn(uuid) - conn = {'col_conn': col_conn, - 'data_loader': BatchLoaders(col_conn)} + conn = {"col_conn": col_conn, "data_loader": BatchLoaders(col_conn)} parent_key = get_path_parent_key(info) info.context.setdefault(parent_key, conn) @@ -763,12 +763,12 @@ def set_col_conn_for_uuid(info, uuid): def get_col_conn(info): parent_key = get_path_parent_key(info) - return info.context[parent_key]['col_conn'] + return info.context[parent_key]["col_conn"] def get_data_loader(info): parent_key = get_path_parent_key(info) - return info.context[parent_key]['data_loader'] + return info.context[parent_key]["data_loader"] def get_path_parent_key(info): diff --git a/graphql_service/resolver/tests/test_resolvers.py b/graphql_service/resolver/tests/test_resolvers.py index e88c1bd0..ffcc6c54 100644 --- a/graphql_service/resolver/tests/test_resolvers.py +++ b/graphql_service/resolver/tests/test_resolvers.py @@ -19,19 +19,22 @@ import graphql_service.resolver.gene_model as model from common import crossrefs, db + def create_graphql_resolve_info(database_client): """ Factory for creating the mock Info objects produced by graphql """ info = Mock() - attrs = {'as_list.return_value': ['test_feature']} + attrs = {"as_list.return_value": ["test_feature"]} info.path = Mock(**attrs) request_mock = Mock() request_mock.state = State() info.context = { "stuff": "Nonsense", "mongo_db_client": database_client, - "XrefResolver": crossrefs.XrefResolver(from_file="common/tests/mini_identifiers.json"), + "XrefResolver": crossrefs.XrefResolver( + from_file="common/tests/mini_identifiers.json" + ), "request": request_mock, } return info @@ -40,38 +43,39 @@ def create_graphql_resolve_info(database_client): def prepare_mongo_instance(): mongo_client = db.FakeMongoDbClient() database = mongo_client.mongo_db - collection = database.create_collection('uuid_to_collection_mapping') + collection = database.create_collection("uuid_to_collection_mapping") collection.insert_many( [ { "uuid": "1", "collection": "collection1", "is_current": True, - "load_date": "2023-06-29T17:00:41.510Z" + "load_date": "2023-06-29T17:00:41.510Z", }, { "uuid": "2", "collection": "collection2", "is_current": True, - "load_date": "2023-06-29T17:00:41.736Z" + "load_date": "2023-06-29T17:00:41.736Z", }, { "uuid": "test_genome_id", "collection": "collection1", "is_current": True, - "load_date": "2023-06-29T17:00:41.736Z" + "load_date": "2023-06-29T17:00:41.736Z", }, { "uuid": "plasmodium_falciparum_GCA_000002765_2", "collection": "collection1", "is_current": True, - "load_date": "2023-06-29T17:00:41.736Z" - } + "load_date": "2023-06-29T17:00:41.736Z", + }, ] ) return mongo_client + @pytest.fixture(name="basic_data") def fixture_basic_data(): "Some fake genes" @@ -79,7 +83,7 @@ def fixture_basic_data(): mongo_client = prepare_mongo_instance() database = mongo_client.mongo_db - collection1 = database.create_collection('collection1') + collection1 = database.create_collection("collection1") collection1.insert_many( [ @@ -102,7 +106,7 @@ def fixture_basic_data(): ] ) - collection2 = database.create_collection('collection2') + collection2 = database.create_collection("collection2") collection2.insert_many( [ @@ -137,7 +141,7 @@ def fixture_transcript_data(): mongo_client = prepare_mongo_instance() database = mongo_client.mongo_db - collection = database.create_collection('collection1') + collection = 
database.create_collection("collection1") collection.insert_many( [ { @@ -190,7 +194,7 @@ def fixture_region_data(): mongo_client = prepare_mongo_instance() database = mongo_client.mongo_db - collection = database.create_collection('collection1') + collection = database.create_collection("collection1") collection.insert_many( [ @@ -225,7 +229,7 @@ def fixture_slice_data(): mongo_client = prepare_mongo_instance() database = mongo_client.mongo_db - collection = database.create_collection('collection1') + collection = database.create_collection("collection1") collection.insert_many( [ @@ -280,7 +284,7 @@ def fixture_genome_data(): mongo_client = prepare_mongo_instance() database = mongo_client.mongo_db - collection = database.create_collection('collection1') + collection = database.create_collection("collection1") collection.insert_many( [ { @@ -369,6 +373,7 @@ def test_resolve_gene(basic_data): assert result["symbol"] == "banana" + def test_resolve_gene_by_symbol(basic_data): "Test querying by gene symbol which can be ambiguous" @@ -422,7 +427,7 @@ def test_resolve_transcript_by_id_not_found(transcript_data): ) assert transcript_not_found_error.value.extensions["code"] == "TRANSCRIPT_NOT_FOUND" assert transcript_not_found_error.value.extensions["stable_id"] == "FAKEYFAKEYFAKEY" - assert transcript_not_found_error.value.extensions["genome_id"] == '1' + assert transcript_not_found_error.value.extensions["genome_id"] == "1" def test_resolve_transcript_by_symbol(transcript_data): @@ -1078,7 +1083,7 @@ def test_collection_lookup_service(basic_data): # In the application, Path is set by GraphQL. # As we are not using GraphQL's Path but a Mock, # we need to set its value manually. - attrs = {'as_list.return_value': ['test_feature2']} + attrs = {"as_list.return_value": ["test_feature2"]} info.path = Mock(**attrs) result2 = model.resolve_gene( diff --git a/graphql_service/server.py b/graphql_service/server.py index 39c1c424..27068c91 100644 --- a/graphql_service/server.py +++ b/graphql_service/server.py @@ -77,7 +77,9 @@ ) starlette_middleware = [ - middleware.Middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["GET", "POST"]) + middleware.Middleware( + CORSMiddleware, allow_origins=["*"], allow_methods=["GET", "POST"] + ) ] # The original HTML file can be found under diff --git a/graphql_service/tests/snapshot_utils.py b/graphql_service/tests/snapshot_utils.py index 6cc849ed..65bfa223 100644 --- a/graphql_service/tests/snapshot_utils.py +++ b/graphql_service/tests/snapshot_utils.py @@ -29,25 +29,25 @@ def prepare_mongo_instance(): mongo_client = db.FakeMongoDbClient() database = mongo_client.mongo_db - collection1 = database.create_collection('uuid_to_collection_mapping') + collection1 = database.create_collection("uuid_to_collection_mapping") collection1.insert_many( [ { "uuid": "homo_sapiens_GCA_000001405_28", "collection": "collection2", "is_current": True, - "load_date": "2023-06-29T17:00:41.510Z" + "load_date": "2023-06-29T17:00:41.510Z", }, { "uuid": "triticum_aestivum_GCA_900519105_1", "collection": "collection2", "is_current": True, - "load_date": "2023-06-29T17:00:41.736Z" - } + "load_date": "2023-06-29T17:00:41.736Z", + }, ] ) - collection2 = database.create_collection('collection2') + collection2 = database.create_collection("collection2") collection2.insert_one(build_gene()) collection2.insert_many(build_transcripts()) collection2.insert_many(build_products()) @@ -68,13 +68,12 @@ def prepare_context_provider(mongo_client, xref): # is assigned to context_value which gets 
evaluated at the beginning # of every request. def context_provider(): - context = { - "mongo_db_client": mongo_client, - "XrefResolver": xref - } + context = {"mongo_db_client": mongo_client, "XrefResolver": xref} return context + return context_provider + def setup_test(): """ Run setup scripts once per module diff --git a/graphql_service/tests/snapshots/snap_test_version_retrieval.py b/graphql_service/tests/snapshots/snap_test_version_retrieval.py index 8cd02348..1ce51a79 100644 --- a/graphql_service/tests/snapshots/snap_test_version_retrieval.py +++ b/graphql_service/tests/snapshots/snap_test_version_retrieval.py @@ -7,12 +7,6 @@ snapshots = Snapshot() -snapshots['test_version_retrieval 1'] = { - 'version': { - 'api': { - 'major': '0', - 'minor': '2', - 'patch': '0-beta' - } - } +snapshots["test_version_retrieval 1"] = { + "version": {"api": {"major": "0", "minor": "2", "patch": "0-beta"}} } diff --git a/graphql_service/tests/test_gene_retrieval.py b/graphql_service/tests/test_gene_retrieval.py index 4ae0ac20..f10181f9 100644 --- a/graphql_service/tests/test_gene_retrieval.py +++ b/graphql_service/tests/test_gene_retrieval.py @@ -18,6 +18,7 @@ executable_schema, context = setup_test() + @pytest.mark.asyncio async def test_gene_retrieval_by_id_camel_case(snapshot): "Test `gene` query using byId camelCase" diff --git a/graphql_service/tests/test_transcript_retrieval.py b/graphql_service/tests/test_transcript_retrieval.py index 60f6061a..e38ccb66 100644 --- a/graphql_service/tests/test_transcript_retrieval.py +++ b/graphql_service/tests/test_transcript_retrieval.py @@ -161,9 +161,7 @@ async def test_transcript_splicing(snapshot): } }""" (success, result) = await graphql( - executable_schema, - {"query": query}, - context_value=context() + executable_schema, {"query": query}, context_value=context() ) assert success assert result["data"]["transcript"] diff --git a/grpc_service/grpc_model.py b/grpc_service/grpc_model.py index 57b3ad92..320b08e5 100644 --- a/grpc_service/grpc_model.py +++ b/grpc_service/grpc_model.py @@ -10,7 +10,11 @@ def __init__(self, grpc_stub): self.grpc_stub = grpc_stub def get_genome_by_genome_uuid(self, genome_uuid, release_version=None): - logger.debug("Received RPC for GetGenomeByUUID with genome_uuid: '%s', release: %s", genome_uuid, release_version) + logger.debug( + "Received RPC for GetGenomeByUUID with genome_uuid: '%s', release: %s", + genome_uuid, + release_version, + ) request = ensembl_metadata_pb2.GenomeUUIDRequest( genome_uuid=genome_uuid, release_version=release_version ) @@ -18,7 +22,11 @@ def get_genome_by_genome_uuid(self, genome_uuid, release_version=None): return response def get_genome_by_keyword(self, keyword, release_version=None): - logger.debug("Received RPC for GetGenomesByKeyword with keyword: '%s', release: %s", keyword, release_version) + logger.debug( + "Received RPC for GetGenomesByKeyword with keyword: '%s', release: %s", + keyword, + release_version, + ) request = ensembl_metadata_pb2.GenomeByKeywordRequest( keyword=keyword, release_version=release_version ) @@ -26,7 +34,10 @@ def get_genome_by_keyword(self, keyword, release_version=None): return response def get_genome_by_assembly_acc_id(self, assembly_accession_id): - logger.debug("Received RPC for GetGenomesByAssemblyAccessionID with assembly_accession_id: '%s'", assembly_accession_id) + logger.debug( + "Received RPC for GetGenomesByAssemblyAccessionID with assembly_accession_id: '%s'", + assembly_accession_id, + ) request = ensembl_metadata_pb2.AssemblyAccessionIDRequest( 
assembly_accession=assembly_accession_id )
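
The logger.debug() calls in the grpc_service changes above pass lazy %-style arguments instead of interpolating with f-strings, which is the form pylint's logging-fstring-interpolation (W1203) check expects: the message is only rendered when the DEBUG level is actually enabled, so the formatting cost is skipped otherwise. A minimal standalone sketch of the same pattern, using only the standard logging module; the function and argument names below are illustrative and not taken from the patch:

    import logging

    logger = logging.getLogger(__name__)


    def fetch_genome(genome_uuid, release_version=None):
        # Lazy interpolation: logging builds the message only if DEBUG is enabled,
        # whereas an f-string would be evaluated before the call regardless.
        logger.debug(
            "Fetching genome with genome_uuid: '%s', release: %s",
            genome_uuid,
            release_version,
        )
        return {"genome_uuid": genome_uuid, "release_version": release_version}


    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        fetch_genome("some-genome-uuid", release_version=110)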