Fix some Pylint warnings for CI/CD to pass #137

Merged
merged 4 commits on Feb 15, 2024
29 changes: 20 additions & 9 deletions .github/workflows/pytest.yml → .github/workflows/tests.yml
@@ -1,36 +1,42 @@
# Workflow name
name: PyTest and Black
name: PyTest, Black and Pylint

# Controls when the workflow will run
on:
# Triggers the workflow on pull request (on main only) events
# Triggers the workflow on pull request (on main and develop only) events
pull_request:
branches:
- main
- develop

# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "test"
test:
# This workflow contains a single job called "tests"
tests:
# The type of runner that the job will run on and timeout in minutes
name: Run Python Tests and Black formatter
name: Run Python Tests, Black formatter and Pylint
runs-on: ubuntu-latest
timeout-minutes: 10

# Include a strategy matrix in order to allow the job to run multiple times with different versions of Python
strategy:
matrix:
python-version: [3.8, 3.9]

# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out our repository under $GITHUB_WORKSPACE, so our job can access it
- name: Check out repository code
uses: actions/checkout@v3

# Set up Python version
- name: Set up Python 3.7
# Set up Python version from the matrix
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: '3.7'
python-version: ${{ matrix.python-version }}

# Runs a set of commands installing Python dependencies using the runner's shell (run a multi-line script)
- name: Install Python dependencies
@@ -47,4 +53,9 @@ jobs:
# Check code has been formatted
- name: Run Black code formatter
run: |
black . --check --verbose --diff --color
black . --check --verbose --diff --color

# Run Pylint
- name: Run Pylint
run: |
pylint $(git ls-files '*.py') --fail-under=9.5
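
The new workflow step gates merges on a Pylint score of at least 9.5 across all tracked Python files. As a rough local check, the sketch below mirrors that gate; it is not part of the PR and assumes a git checkout with pylint >= 2.5 (where --fail-under exists) installed.

```python
import subprocess

# Collect the tracked Python files, mirroring `git ls-files '*.py'` in the workflow.
files = subprocess.run(
    ["git", "ls-files", "*.py"], capture_output=True, text=True, check=True
).stdout.splitlines()

# Apply the same quality gate as CI: the exit status is non-zero when the score
# falls below 9.5 (or when pylint reports errors).
result = subprocess.run(["pylint", *files, "--fail-under=9.5"])
print("Pylint gate passed" if result.returncode == 0 else "Pylint gate failed")
```
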
2 changes: 2 additions & 0 deletions .pylintrc
@@ -1,5 +1,7 @@
[MASTER]
init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))"
max-line-length=120
ignore-paths=^graphql_service/tests/snapshots/.*$,
^graphql_service/tests/fixtures/.*$

disable=missing-class-docstring, missing-function-docstring, line-too-long
8 changes: 4 additions & 4 deletions common/crossrefs.py
@@ -107,8 +107,8 @@ def _index_identifiers_org_data(self):
Provide prefix-based indexes for the flat list of entities from
the identifiers.org api
"""
for ns in self.identifiers_org_data["payload"]["namespaces"]:
self.id_org_indexed[ns["prefix"]] = ns
for namespace in self.identifiers_org_data["payload"]["namespaces"]:
self.id_org_indexed[namespace["prefix"]] = namespace

def generate_url_from_id_org_data(self, xref_acc_id, id_org_ns_prefix):
"""
@@ -204,9 +204,9 @@ def find_url_using_ens_xref_db_name(self, xref_acc_id, xref_db_name):
return xref_base + xref_acc_id

# Now get the URL from identifiers.org using the id_org_ns_prefix and xref_id
URL = self.generate_url_from_id_org_data(xref_acc_id, id_org_ns_prefix)
url = self.generate_url_from_id_org_data(xref_acc_id, id_org_ns_prefix)

return URL
return url

def annotate_crossref(self, xref):
"""
13 changes: 9 additions & 4 deletions common/db.py
@@ -45,7 +45,9 @@ def get_collection_conn(self, uuid):
# Fallback to the collection in the configuration file if no data collection is found for the given UUID
if not data_collection:
data_collection_name = self.config.get("mongo_default_collection")
print(f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID")
print(
f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID"
)
else:
data_collection_name = data_collection.get("collection")
print(f"Using '{data_collection_name}' collection for '{uuid}' UUID")
@@ -85,20 +87,23 @@ class FakeMongoDbClient:
"""
Sets up a mongomock collection for thoas code to test with
"""

def __init__(self):
mongo_client = mongomock.MongoClient()
self.mongo_db = mongo_client.db

def get_collection_conn(self, uuid):
lookup_service_collection = 'uuid_to_collection_mapping'
lookup_service_collection = "uuid_to_collection_mapping"
query = {"uuid": uuid, "is_current": True}
data_collection = self.mongo_db[lookup_service_collection].find_one(query)

# Fallback to the default collection if no collection found in the mappings
# for the given UUID
if not data_collection:
data_collection_name = 'collection1'
print(f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID")
data_collection_name = "collection1"
print(
f"Falling back to the default collection '{data_collection_name}' for '{uuid}' UUID"
)
else:
data_collection_name = data_collection.get("collection")
print(f"Using '{data_collection_name}' collection for '{uuid}' UUID")
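
The fallback logic in FakeMongoDbClient can be exercised in isolation with mongomock. A minimal sketch, assuming mongomock is installed; the "known-uuid" and "collection_a" values are made up, while the mapping collection name, its fields, and the "collection1" default come from the diff.

```python
import mongomock

# In-memory Mongo with one uuid -> collection mapping, as FakeMongoDbClient sets up.
db = mongomock.MongoClient().db
db["uuid_to_collection_mapping"].insert_one(
    {"uuid": "known-uuid", "collection": "collection_a", "is_current": True}
)

def collection_name_for(uuid):
    mapping = db["uuid_to_collection_mapping"].find_one({"uuid": uuid, "is_current": True})
    # Fall back to the default collection when no mapping exists for the UUID.
    return mapping.get("collection") if mapping else "collection1"

print(collection_name_for("known-uuid"))    # collection_a
print(collection_name_for("unknown-uuid"))  # collection1
```
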
3 changes: 2 additions & 1 deletion common/extensions.py
@@ -33,5 +33,6 @@ def format(self, context):
)
return {
"execution_time_in_seconds": exec_time_in_secs,
"metadata_api_version": utils.get_ensembl_metadata_api_version()
"metadata_api_version": utils.get_ensembl_metadata_api_version(),
}
return None
15 changes: 9 additions & 6 deletions common/utils.py
@@ -12,8 +12,9 @@
limitations under the License.
"""


def check_config_validity(config):
MANDATORY_FIELDS = [
mandatory_fields = [
"mongo_host",
"mongo_port",
"mongo_user",
@@ -24,21 +25,23 @@ def check_config_validity(config):
"grpc_host",
"grpc_port",
]
for mandatory_field in MANDATORY_FIELDS:
for mandatory_field in mandatory_fields:
if not config.get(mandatory_field):
raise KeyError(f"Missing information in configuration file - '{mandatory_field}'")
raise KeyError(
f"Missing information in configuration file - '{mandatory_field}'"
)


def get_ensembl_metadata_api_version():
"""
Get the Metadata API tag from the requirements.txt file
"""
version = "unknown" # default version
with open('requirements.txt', 'r') as file:
with open("requirements.txt", "r", encoding="UTF-8") as file:
lines = file.readlines()
for line in lines:
if 'ensembl-metadata-api' in line:
if "ensembl-metadata-api" in line:
# Extract the tag part from the line
version = line.strip().split('@')[-1]
version = line.strip().split("@")[-1]
break
return version
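
For context, get_ensembl_metadata_api_version simply takes whatever follows the last "@" on the matching requirements line. A small sketch of that extraction; the pin below is illustrative, not the project's actual requirements entry.

```python
# Hypothetical requirements.txt line; the real pin may differ.
line = "ensembl-metadata-api @ git+https://github.com/Ensembl/ensembl-metadata-api.git@2.0.0\n"

version = "unknown"
if "ensembl-metadata-api" in line:
    # Everything after the last '@' is treated as the tag.
    version = line.strip().split("@")[-1]

print(version)  # 2.0.0
```
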
46 changes: 23 additions & 23 deletions graphql_service/resolver/gene_model.py
@@ -17,6 +17,7 @@

from ariadne import QueryType, ObjectType
from graphql import GraphQLResolveInfo
from pymongo.collection import Collection

from graphql_service.resolver.data_loaders import BatchLoaders

@@ -39,7 +40,6 @@
MissingArgumentException,
)

from pymongo.collection import Collection

# Define Query types for GraphQL
# Don't forget to import these into ariadne_app.py if you add a new type
@@ -62,7 +62,7 @@
def resolve_gene(
_,
info: GraphQLResolveInfo,
byId: Optional[Dict[str, str]] = None,
byId: Optional[Dict[str, str]] = None, # pylint: disable=invalid-name
by_id: Optional[Dict[str, str]] = None,
) -> Dict:
"Load Gene via stable_id"
@@ -95,7 +95,6 @@ def resolve_gene(
if not result:
raise GeneNotFoundError(by_id=by_id)


return result


@@ -180,14 +179,13 @@ def insert_gene_name_urls(gene_metadata: Dict, info: GraphQLResolveInfo) -> Dict
def resolve_transcript(
_,
info: GraphQLResolveInfo,
bySymbol: Optional[Dict[str, str]] = None,
bySymbol: Optional[Dict[str, str]] = None, # pylint: disable=invalid-name
by_symbol: Optional[Dict[str, str]] = None,
byId: Optional[Dict[str, str]] = None,
byId: Optional[Dict[str, str]] = None, # pylint: disable=invalid-name
by_id: Optional[Dict[str, str]] = None,
) -> Dict:
"Load Transcripts by symbol or stable_id"


if by_symbol is None:
by_symbol = bySymbol
if by_id is None:
@@ -234,7 +232,8 @@ def resolve_transcript(

@QUERY_TYPE.field("version")
def resolve_api(
_: None, info: GraphQLResolveInfo # the second argument must be named `info` to avoid a NameError
_: None,
info: GraphQLResolveInfo, # the second argument must be named `info` to avoid a NameError
) -> Dict[str, Dict[str, str]]:
"""
Resolve the API version.
@@ -243,8 +242,8 @@ def resolve_api(
try:
version_details = get_version_details()
return {"api": version_details}
except Exception as e:
logging.error(f"Error resolving API version: {e}")
except Exception as exp:
logging.error(f"Error resolving API version: {exp}")
raise


@@ -348,8 +347,8 @@ async def resolve_transcript_gene(transcript: Dict, info: GraphQLResolveInfo) ->
def resolve_overlap(
_,
info: GraphQLResolveInfo,
genomeId: Optional[str] = None,
regionName: Optional[str] = None,
genomeId: Optional[str] = None, # pylint: disable=invalid-name
regionName: Optional[str] = None, # pylint: disable=invalid-name
start: Optional[int] = None,
end: Optional[int] = None,
by_slice: Optional[Dict[str, Any]] = None,
@@ -702,21 +701,23 @@ def get_version_details() -> Dict[str, str]:
config = configparser.ConfigParser()

try:
if not config.read('version_config.ini'):
if not config.read("version_config.ini"):
raise FileNotFoundError("INI file not found.")

version_data = config['version']
version_data = config["version"]
return {
"major": version_data['major'],
"minor": version_data['minor'],
"patch": version_data['patch']
"major": version_data["major"],
"minor": version_data["minor"],
"patch": version_data["patch"],
}
except FileNotFoundError:
logging.error("Version config file not found. Using default values.")
except KeyError:
logging.error("Version section or keys not found in INI file. Using default values.")
except Exception as e:
logging.error(f"Error reading INI file: {e}. Using default values.")
logging.error(
"Version section or keys not found in INI file. Using default values."
)
except Exception as exp:
logging.error(f"Error reading INI file: {exp}. Using default values.")

return {"major": "0", "minor": "1", "patch": "0-beta"}

@@ -750,8 +751,7 @@ def set_col_conn_for_uuid(info, uuid):

col_conn = info.context["mongo_db_client"].get_collection_conn(uuid)

conn = {'col_conn': col_conn,
'data_loader': BatchLoaders(col_conn)}
conn = {"col_conn": col_conn, "data_loader": BatchLoaders(col_conn)}

parent_key = get_path_parent_key(info)
info.context.setdefault(parent_key, conn)
@@ -763,12 +763,12 @@

def get_col_conn(info):
parent_key = get_path_parent_key(info)
return info.context[parent_key]['col_conn']
return info.context[parent_key]["col_conn"]


def get_data_loader(info):
parent_key = get_path_parent_key(info)
return info.context[parent_key]['data_loader']
return info.context[parent_key]["data_loader"]


def get_path_parent_key(info):
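
Among the changes above, get_version_details expects a version_config.ini with a [version] section holding major, minor and patch keys, and falls back to 0.1.0-beta otherwise. A minimal sketch of that parsing, using an in-memory INI string in place of the real file.

```python
import configparser

# Illustrative content; the service reads the real file via config.read("version_config.ini").
SAMPLE_INI = """
[version]
major = 0
minor = 1
patch = 0-beta
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE_INI)

version_data = config["version"]
print({
    "major": version_data["major"],
    "minor": version_data["minor"],
    "patch": version_data["patch"],
})
```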