style: update ruff to 0.5.0 (#355)
korikuzma authored Jul 12, 2024
1 parent 2cf9912 commit 7e804ea
Showing 30 changed files with 606 additions and 549 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -8,7 +8,7 @@ repos:
      - id: trailing-whitespace
      - id: end-of-file-fixer
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.2.0
+    rev: v0.5.0
    hooks:
      - id: ruff
      - id: ruff-format
4 changes: 2 additions & 2 deletions Pipfile
@@ -9,7 +9,7 @@ fastapi = "*"
uvicorn = "*"
click = "*"
boto3 = "*"
-"ga4gh.vrs" = "~=2.0.0a1"
+"ga4gh.vrs" = "~=2.0.0a8"

[dev-packages]
gene = {editable = true, path = "."}
@@ -19,7 +19,7 @@ wags-tails = ">=0.1.1"
psycopg = {version = "*", extras=["binary"]}
pytest = "*"
pre-commit = "*"
-ruff = ">=0.1.2"
+ruff = "==0.5.0"
pytest-cov = "*"
httpx = "*"
mock = "*"
31 changes: 17 additions & 14 deletions docs/scripts/generate_normalize_figure.py
@@ -6,9 +6,9 @@
Embeddable HTML for the normalization figure should be deposited in the correct
location, within docs/source/_static/html/.
"""
""" # noqa: INP001

import json
-from typing import Dict

import gravis as gv

@@ -24,7 +24,7 @@
]


-def create_gjgf(result: UnmergedNormalizationService) -> Dict:
+def create_gjgf(result: UnmergedNormalizationService) -> dict:
"""Create gravis input.
:param result: result from Unmerged Normalization search
@@ -43,36 +43,39 @@ def create_gjgf(result: UnmergedNormalizationService) -> Dict:
}
}

-    for i, (source, matches) in enumerate(result.source_matches.items()):
+    for i, (_, matches) in enumerate(result.source_matches.items()):
for match in matches.records:
graph["graph"]["nodes"][match.concept_id] = {
"metadata": {
"color": COLORS[i],
"hover": f"{match.concept_id}\n{match.symbol}\n<i>{match.label}</i>", # noqa: E501
"click": f"<p color='black'>{json.dumps(match.model_dump(), indent=2)}</p>", # noqa: E501
"hover": f"{match.concept_id}\n{match.symbol}\n<i>{match.label}</i>",
"click": f"<p color='black'>{json.dumps(match.model_dump(), indent=2)}</p>",
}
}
for xref in match.xrefs:
graph["graph"]["edges"].append(
{"source": match.concept_id, "target": xref}
)

-    included_edges = []
-    for edge in graph["graph"]["edges"]:
+    included_edges = [
+        edge
+        for edge in graph["graph"]["edges"]
        if (
            edge["target"] in graph["graph"]["nodes"]
            and edge["source"] in graph["graph"]["nodes"]
-        ):
-            included_edges.append(edge)
+        )
+    ]

graph["graph"]["edges"] = included_edges

included_nodes = {k["source"] for k in graph["graph"]["edges"]}.union(
{k["target"] for k in graph["graph"]["edges"]}
)
-    new_nodes = {}
-    for key, value in graph["graph"]["nodes"].items():
-        if key in included_nodes:
-            new_nodes[key] = value
+    new_nodes = {
+        key: value
+        for key, value in graph["graph"]["nodes"].items()
+        if key in included_nodes
+    }
graph["graph"]["nodes"] = new_nodes

return graph
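The loop-to-comprehension rewrites above are the pattern ruff's perflint rule PERF401 (manual list comprehension) pushes toward, and the `Dict` → `dict` return annotation is the PEP 585 builtin-generics style. A minimal standalone sketch of the same edge-filtering refactor, using hypothetical `nodes`/`edges` data rather than this repo's graph structure:

    # Hypothetical data, for illustration only.
    nodes = {"hgnc:1097": {}, "ncbigene:673": {}}
    edges = [
        {"source": "hgnc:1097", "target": "ncbigene:673"},
        {"source": "hgnc:1097", "target": "ensembl:ENSG00000157764"},
    ]

    # Before: build-and-append loop, flagged by PERF401.
    included_edges = []
    for edge in edges:
        if edge["source"] in nodes and edge["target"] in nodes:
            included_edges.append(edge)

    # After: a single list comprehension, as in the diff above.
    included_edges = [
        edge for edge in edges if edge["source"] in nodes and edge["target"] in nodes
    ]
    assert len(included_edges) == 1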
2 changes: 1 addition & 1 deletion docs/source/conf.py
@@ -79,7 +79,7 @@ def linkcode_resolve(domain, info):
if not info["module"]:
return None
filename = info["module"].replace(".", "/")
-    return f"https://github.com/cancervariants/gene-normalization/blob/main/{filename}.py"  # noqa: E501
+    return f"https://github.com/cancervariants/gene-normalization/blob/main/{filename}.py"


# -- code block style --------------------------------------------------------
24 changes: 20 additions & 4 deletions pyproject.toml
@@ -37,7 +37,7 @@ dynamic = ["version"]
pg = ["psycopg[binary]"]
etl = ["gffutils", "biocommons.seqrepo", "wags-tails>=0.1.1"]
test = ["pytest>=6.0", "pytest-cov", "mock", "httpx"]
-dev = ["pre-commit", "ruff==0.2.0"]
+dev = ["pre-commit", "ruff==0.5.0"]
docs = [
"sphinx==6.1.3",
"sphinx-autodoc-typehints==1.22.0",
@@ -107,16 +107,22 @@ select = [
"DTZ", # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz
"T10", # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz
"EM", # https://docs.astral.sh/ruff/rules/#flake8-errmsg-em
"LOG", # https://docs.astral.sh/ruff/rules/#flake8-logging-log
"G", # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g
"INP", # https://docs.astral.sh/ruff/rules/#flake8-no-pep420-inp
"PIE", # https://docs.astral.sh/ruff/rules/#flake8-pie-pie
"T20", # https://docs.astral.sh/ruff/rules/#flake8-print-t20
"PT", # https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt
"Q", # https://docs.astral.sh/ruff/rules/#flake8-quotes-q
"RSE", # https://docs.astral.sh/ruff/rules/#flake8-raise-rse
"RET", # https://docs.astral.sh/ruff/rules/#flake8-return-ret
"SLF", # https://docs.astral.sh/ruff/rules/#flake8-self-slf
"SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
"ARG", # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg
"PTH", # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
"PGH", # https://docs.astral.sh/ruff/rules/#pygrep-hooks-pgh
"PERF", # https://docs.astral.sh/ruff/rules/#perflint-perf
"FURB", # https://docs.astral.sh/ruff/rules/#refurb-furb
"RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
]
fixable = [
@@ -127,15 +133,19 @@ fixable = [
"ANN",
"B",
"C4",
"LOG",
"G",
"PIE",
"PT",
"RSE",
"SIM",
"PERF",
"FURB",
"RUF"
]
-# ANN101 - missing-type-self
# ANN003 - missing-type-kwargs
+# ANN101 - missing-type-self
+# ANN102 - missing-type-cls
# D203 - one-blank-line-before-class
# D205 - blank-line-after-summary
# D206 - indent-with-spaces*
Expand All @@ -151,7 +161,7 @@ fixable = [
# S321 - suspicious-ftp-lib-usage
# *ignored for compatibility with formatter
ignore = [
"ANN101", "ANN003",
"ANN003", "ANN101", "ANN102",
"D203", "D205", "D206", "D213", "D300", "D400", "D415",
"E111", "E114", "E117", "E501",
"W191",
@@ -169,7 +179,13 @@ ignore = [
# D100 - undocumented-public-module
# D103 - undocumented-public-function
# I001 - unsorted-imports
"tests/*" = ["ANN001", "ANN2", "ANN102", "S101", "B011"]
# INP001 - implicit-namespace-package
# ARG001 - unused-function-argument
# SLF001 - private-member-acces
"tests/*" = ["ANN001", "ANN2", "ANN102", "S101", "INP001", "SLF001", "ARG001"]
"*__init__.py" = ["F401"]
"gene/schemas.py" = ["ANN001", "ANN201", "N805"]
"docs/source/conf.py" = ["D100", "I001", "D103", "ANN201", "ANN001"]

+[tool.ruff.format]
+docstring-code-format = true
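`docstring-code-format = true` turns on ruff's formatter for code examples embedded in docstrings (doctests and fenced code blocks). A small sketch of the effect, with an illustrative function that is not from this repo:

    def add(a: int, b: int) -> int:
        """Add two integers.

        With docstring-code-format enabled, ruff also reformats the doctest
        below, e.g. normalizing `add( 1,2 )` to `add(1, 2)`:

        >>> add(1, 2)
        3
        """
        return a + b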
5 changes: 3 additions & 2 deletions src/gene/__init__.py
@@ -1,8 +1,9 @@
"""The VICC library for normalizing genes."""
+
from os import environ
from pathlib import Path

-from .version import __version__  # noqa: F401
+from .version import __version__

APP_ROOT = Path(__file__).resolve().parent

@@ -37,7 +38,7 @@ class DownloadException(Exception):  # noqa: N818
PREFIX_LOOKUP = {
v.value: SourceName[k].value
for k, v in NamespacePrefix.__members__.items()
-    if k in SourceName.__members__.keys()
+    if k in SourceName.__members__
}

# use to generate namespace prefix from source ID value
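Dropping `.keys()` here is ruff's SIM118 (`in dict.keys()`): a membership test on a dict already checks its keys, so the call is redundant. For example:

    members = {"HGNC": "hgnc", "Ensembl": "ensembl"}
    assert "HGNC" in members.keys()  # flagged by SIM118
    assert "HGNC" in members  # equivalent and preferred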
31 changes: 17 additions & 14 deletions src/gene/cli.py
@@ -1,9 +1,10 @@
"""Provides a CLI util to make updates to normalizer database."""
+
import logging
import os
+from collections.abc import Collection
from pathlib import Path
from timeit import default_timer as timer
-from typing import Collection, List, Optional, Set

import click

@@ -61,7 +62,7 @@ def check_db(db_url: str, verbose: bool = False) -> None:
@click.command()
@click.option("--data_url", help="URL to data dump")
@click.option("--db_url", help="URL endpoint for the application database.")
-def update_from_remote(data_url: Optional[str], db_url: str) -> None:
+def update_from_remote(data_url: str | None, db_url: str) -> None:
"""Update data from remotely-hosted DB dump. By default, fetches from latest
available dump on VICC S3 bucket; specific URLs can be provided instead by
command line option or GENE_NORM_REMOTE_DB_URL environment variable.
@@ -81,10 +82,10 @@ def update_from_remote(data_url: Optional[str], db_url: str) -> None:
except NotImplementedError:
click.echo(
f"Error: Fetching remote data dump not supported for {db.__class__.__name__}"
-        )  # noqa: E501
+        )
click.get_current_context().exit(1)
except DatabaseException as e:
-        click.echo(f"Encountered exception during update: {str(e)}")
+        click.echo(f"Encountered exception during update: {e!s}")
click.get_current_context().exit(1)
_logger.info("Successfully loaded data from remote snapshot.")
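The `{str(e)}` → `{e!s}` change is ruff's RUF010 (explicit conversion flag); the `!s` conversion in an f-string produces the same text without the nested `str()` call:

    try:
        raise ValueError("connection refused")  # stand-in error, for illustration
    except ValueError as e:
        assert f"{e!s}" == str(e) == "connection refused"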

Expand All @@ -106,18 +107,18 @@ def dump_database(output_directory: Path, db_url: str) -> None:
""" # noqa: D301
_configure_logging()
if not output_directory:
-        output_directory = Path(".")
+        output_directory = Path()

db = create_db(db_url, False)
try:
db.export_db(output_directory)
except NotImplementedError:
click.echo(
f"Error: Dumping data to file not supported for {db.__class__.__name__}"
-        )  # noqa: E501
+        )
click.get_current_context().exit(1)
except DatabaseException as e:
-        click.echo(f"Encountered exception during update: {str(e)}")
+        click.echo(f"Encountered exception during update: {e!s}")
click.get_current_context().exit(1)
_logger.info("Database dump successful.")

@@ -137,7 +138,7 @@ def _update_normalizer(
:param use_existing: if True, use most recent local version of source data instead of
fetching from remote
"""
-    processed_ids = list()
+    processed_ids = []
for n in sources:
delete_time = _delete_source(n, db)
_load_source(n, db, delete_time, processed_ids, use_existing)
@@ -173,7 +174,7 @@ def _load_source(
n: SourceName,
db: AbstractDatabase,
delete_time: float,
-    processed_ids: List[str],
+    processed_ids: list[str],
use_existing: bool,
) -> None:
"""Load individual source data.
@@ -199,7 +200,7 @@ def _load_source(
f"Encountered ModuleNotFoundError attempting to import {e.name}. {_etl_dependency_help}"
)
click.get_current_context().exit()
-    SourceClass = eval(n.value)  # noqa: N806
+    SourceClass = eval(n.value)  # noqa: N806, S307

source = SourceClass(database=db, silent=False)
try:
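The widened suppression is needed because flake8-bandit's S307 flags `eval()` as suspicious; the commit keeps the `eval` and silences the rule alongside the existing N806. A common eval-free alternative, shown only as a sketch (not what this commit does), is an explicit name-to-class mapping:

    class HGNC: ...  # stand-ins for the real ETL source classes
    class Ensembl: ...

    _SOURCE_CLASSES = {"HGNC": HGNC, "Ensembl": Ensembl}

    def get_source_class(name: str) -> type:
        """Look up a source class by name; raises KeyError on unknown names."""
        return _SOURCE_CLASSES[name]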
@@ -234,7 +235,7 @@ def _delete_normalized_data(database: AbstractDatabase) -> None:
click.echo(f"Deleted normalized records in {delete_time:.5f} seconds.")


-def _load_merge(db: AbstractDatabase, processed_ids: Set[str]) -> None:
+def _load_merge(db: AbstractDatabase, processed_ids: set[str]) -> None:
"""Load merged concepts
:param db: database instance
@@ -313,19 +314,21 @@ def update_normalizer_db(
ctx = click.get_current_context()
click.echo(
"Must either enter 1 or more sources, or use `--update_all` parameter"
-        )  # noqa: E501
+        )
click.echo(ctx.get_help())
ctx.exit()
else:
sources_split = sources.lower().split()

if len(sources_split) == 0:
raise Exception("Must enter 1 or more source names to update")
err_msg = "Must enter 1 or more source names to update"
raise Exception(err_msg)

non_sources = set(sources_split) - set(SOURCES)

if len(non_sources) != 0:
raise Exception(f"Not valid source(s): {non_sources}")
err_msg = f"Not valid source(s): {non_sources}"
raise Exception(err_msg)

parsed_source_names = {SourceName(SOURCES[s]) for s in sources_split}
_update_normalizer(parsed_source_names, db, update_merged, use_existing)
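Binding the message to `err_msg` before raising satisfies ruff's flake8-errmsg rules (EM101 for string literals, EM102 for f-strings inside `raise`), which aim to keep tracebacks from duplicating a long literal. The pattern in isolation, as a sketch:

    def validate_sources(sources_split: list[str]) -> None:
        """Raise if no source names were provided."""
        if len(sources_split) == 0:
            # EM101 would flag: raise Exception("Must enter 1 or more ...")
            err_msg = "Must enter 1 or more source names to update"
            raise Exception(err_msg)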
1 change: 1 addition & 0 deletions src/gene/database/__init__.py
@@ -1,4 +1,5 @@
"""Provide database clients."""
+
from .database import (
AWS_ENV_VAR_NAME,
AbstractDatabase,
(Diffs for the remaining 22 changed files not shown.)
