From 068e1081ddb623308927b80e922779b12becb0e2 Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Wed, 21 Dec 2022 19:01:04 +0100 Subject: [PATCH 01/13] Upgrade deps and remove numpy, numba, pandas --- Makefile | 6 ++- pyrightconfig.json | 4 ++ requirements.txt | 37 +++++++--------- requirements_dev.txt | 15 +++---- tdp_core/__init__.py | 15 +++++-- tdp_core/dataset/dataset_api.py | 5 ++- tdp_core/dataset/dataset_def.py | 2 +- tdp_core/dataset/graph/graph.py | 2 +- tdp_core/dataset/graph/graph_api.py | 4 +- tdp_core/db.py | 23 +++++----- tdp_core/dbmanager.py | 6 +-- tdp_core/dbmigration/env.py | 2 +- tdp_core/dbmigration/manager.py | 26 ++++++----- tdp_core/dbview.py | 26 ++++++----- tdp_core/encoder/json_encoder.py | 16 +++---- tdp_core/formatter.py | 8 +++- tdp_core/graph.py | 8 ++-- tdp_core/id_mapping/manager.py | 8 ++-- tdp_core/manager.py | 12 ++--- .../middleware/request_context_middleware.py | 5 ++- tdp_core/mol_img/img_api.py | 2 +- tdp_core/mol_img/models.py | 2 +- tdp_core/mol_img/util/draw.py | 4 +- tdp_core/mol_img/util/molecule.py | 2 +- tdp_core/plugin/model.py | 8 ++-- tdp_core/plugin/parser.py | 4 +- tdp_core/plugin/registry.py | 2 +- tdp_core/proxy.py | 5 ++- tdp_core/security/manager.py | 20 +++++---- tdp_core/security/permissions.py | 10 +++-- tdp_core/security/store/dummy_store.py | 1 - tdp_core/server/mainapp.py | 25 ++++++----- tdp_core/server/utils.py | 13 +++--- tdp_core/server/visyn_server.py | 19 +++++--- tdp_core/settings/model.py | 4 +- tdp_core/settings/router.py | 8 ++-- tdp_core/settings/utils.py | 2 +- tdp_core/sql_use_gevent.py | 44 ------------------- tdp_core/storage.py | 6 +-- tdp_core/swagger.py | 8 ++-- tdp_core/tests/fixtures/postgres_db.py | 4 +- tdp_core/tests/test_rdkit_img.py | 2 +- tdp_core/utils.py | 3 +- tdp_core/xlsx.py | 15 ++++--- 44 files changed, 222 insertions(+), 221 deletions(-) create mode 100644 pyrightconfig.json delete mode 100644 tdp_core/sql_use_gevent.py diff --git a/Makefile b/Makefile index 
929e3c57b..092dc9d8b 100644 --- a/Makefile +++ b/Makefile @@ -4,6 +4,7 @@ pkg_src = tdp_core flake8 = flake8 $(pkg_src) setup.py isort = isort $(pkg_src) setup.py black = black --line-length 140 $(pkg_src) setup.py +pyright = pyright $(pkg_src) .PHONY: all ## Perform the most common development-time rules all: format lint test @@ -21,16 +22,17 @@ check-format: $(isort) --check-only $(black) --check -.PHONY: lint ## Run flake8 +.PHONY: lint ## Run flake8 and pyright lint: $(flake8) + $(pyright) .PHONY: test ## Run tests test: pytest $(pkg_src) .PHONEY: documentation ## Generate docs -documentation: +documentation: mkdocs build .PHONY: install ## Install the requirements diff --git a/pyrightconfig.json b/pyrightconfig.json new file mode 100644 index 000000000..5a3459637 --- /dev/null +++ b/pyrightconfig.json @@ -0,0 +1,4 @@ +{ + "typeCheckingMode": "basic", + "useLibraryCodeForTypes": true, +} diff --git a/requirements.txt b/requirements.txt index bc7496ad6..cfa90326d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,30 +1,25 @@ -alembic==1.7.7 -fastapi==0.75.0 -cachetools==5.0.0 -flask-smorest==0.37.0 +alembic==1.9.0 +cachetools==5.2.0 +fastapi[all]==0.88.0 flask-swagger-ui==3.36.0 -Flask==2.0.3 +Flask[async]==2.2.2 json-cfg==0.4.2 -marshmallow-sqlalchemy==0.28.0 -marshmallow==3.15.0 -numba==0.55.1 -numpy==1.21.6 +marshmallow-sqlalchemy==0.28.1 +marshmallow==3.19.0 openpyxl==3.0.9 -pandas==1.4.1 -Pillow==9.0.1 -psycopg2==2.9.3 -pydantic==1.9.0 -pyjwt==2.4.0 +Pillow==9.3.0 +psycopg==3.1.7 +psycopg2==2.9.5 +pydantic==1.10.2 +pyjwt==2.6.0 pymongo~=3.11.1 +pytest-postgresql==4.1.1 python-dateutil==2.8.2 python-memcached==1.59 python-multipart==0.0.5 -requests==2.27.1 -SQLAlchemy==1.4.32 -sqlmodel==0.0.6 +rdkit==2022.3.5 +requests==2.28.1 +SQLAlchemy==1.4.45 urllib3==1.26.9 -uvicorn[standard]==0.17.6 +uvicorn[standard]==0.20.0 yamlreader==3.0.4 -pytest-postgresql==4.1.1 -psycopg==3.0.16 -rdkit==2022.3.5 diff --git a/requirements_dev.txt b/requirements_dev.txt 
index a79d5013e..1e2ac037e 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,9 +1,8 @@ -black~=22.3.0 -debugpy~=1.5.1 -flake8~=4.0.1 -isort~=5.10.1 -mkdocs-material~=8.2.8 -pep8-naming~=0.12.1 +black~=22.12.0 +debugpy~=1.6.4 +flake8~=6.0.0 +isort~=5.11.3 +pep8-naming~=0.13.3 +pyright~=1.1.285 pytest-runner~=6.0.0 -pytest~=7.1.1 -recommonmark~=0.7.1 +pytest~=7.2.0 diff --git a/tdp_core/__init__.py b/tdp_core/__init__.py index a71a8f764..a05618cbf 100644 --- a/tdp_core/__init__.py +++ b/tdp_core/__init__.py @@ -10,6 +10,10 @@ def init_app(self, app: FastAPI): app.include_router(img_api.app) def register(self, registry: RegHelper): + import logging + + _log = logging.getLogger(__name__) + # phovea_server registry.append( "namespace", @@ -23,7 +27,14 @@ def register(self, registry: RegHelper): "tdp_core.id_mapping.idtype_api", {"namespace": "/api/idtype", "factory": "create_idtype"}, ) - registry.append("json-encoder", "numpy", "tdp_core.encoder.json_encoder") + + try: + import numpy # noqa, type: ignore + + registry.append("json-encoder", "numpy", "tdp_core.encoder.json_encoder") + except ImportError: + _log.info('numpy not available, skipping "numpy" json encoder') + registry.append("json-encoder", "set-encoder", "tdp_core.encoder.set_encoder", {}) registry.append( @@ -63,8 +74,6 @@ def register(self, registry: RegHelper): registry.append_router("tdp_plugin_router", "tdp_core.plugin.router", {}) registry.append("namespace", "tdp_xlsx2json", "tdp_core.xlsx", {"namespace": "/api/tdp/xlsx"}) registry.append("mapping_provider", "tdp_core", "tdp_core.mapping_table") - # TODO: Check if this is still required? 
- registry.append("greenifier", "psycopg2", "tdp_core.sql_use_gevent", {}) # DB migration plugins registry.append( diff --git a/tdp_core/dataset/dataset_api.py b/tdp_core/dataset/dataset_api.py index e3cabfa99..a28fd2c88 100644 --- a/tdp_core/dataset/dataset_api.py +++ b/tdp_core/dataset/dataset_api.py @@ -1,7 +1,8 @@ import logging from builtins import str -from flask import Flask, Response, abort, make_response, request +from flask import Flask, abort, make_response, request +from flask.wrappers import Response from .. import manager from ..utils import etag, jsonify, to_json @@ -92,7 +93,7 @@ def _list_datasets(): data = [d.to_description() for d in iter() if query(d)] limit = request.values.get("limit", -1) - if 0 < limit < len(data): + if 0 < int(limit) < len(data): data = data[:limit] format = request.args.get("format", "json") diff --git a/tdp_core/dataset/dataset_def.py b/tdp_core/dataset/dataset_def.py index 82ffc4418..bb9d6b8e0 100644 --- a/tdp_core/dataset/dataset_def.py +++ b/tdp_core/dataset/dataset_def.py @@ -97,7 +97,7 @@ class ADataSetProvider(object, metaclass=abc.ABCMeta): def __len__(self): import itertools - return itertools.count(self) + return itertools.count(self) # type: ignore @abc.abstractmethod def __iter__(self): diff --git a/tdp_core/dataset/graph/graph.py b/tdp_core/dataset/graph/graph.py index 54fde4925..97d0900c7 100644 --- a/tdp_core/dataset/graph/graph.py +++ b/tdp_core/dataset/graph/graph.py @@ -112,7 +112,7 @@ def resolve_edges(self, edges): def _resolve_parser(format): for p in manager.registry.list("graph-parser"): - if p.format == format: + if p.format == format: # type: ignore return p.load() diff --git a/tdp_core/dataset/graph/graph_api.py b/tdp_core/dataset/graph/graph_api.py index d6c9128fa..8252acef2 100644 --- a/tdp_core/dataset/graph/graph_api.py +++ b/tdp_core/dataset/graph/graph_api.py @@ -23,9 +23,9 @@ def format_json(dataset, args): def resolve_formatter(type, format): for p in manager.registry.list(type + 
"-formatter"): - if p.format == format: + if p.format == format: # type: ignore return p.load() - formats = ",".join(p.format for p in manager.registry.list(type + "-formatter")) + formats = ",".join(p.format for p in manager.registry.list(type + "-formatter")) # type: ignore abort(400, 'unknown format "{0}" possible formats are: {1}'.format(format, formats)) diff --git a/tdp_core/db.py b/tdp_core/db.py index 714b10918..53f3e67ff 100644 --- a/tdp_core/db.py +++ b/tdp_core/db.py @@ -1,6 +1,9 @@ import logging +from typing import Any, Dict, List, Optional from flask import abort +from sqlalchemy.exc import OperationalError +from sqlalchemy.orm import Session from werkzeug.datastructures import MultiDict from . import manager @@ -105,7 +108,7 @@ def __init__(self, engine): :param engine: """ _log.info("creating session") - self._session = manager.db.create_session(engine) + self._session: Session = manager.db.create_session(engine) self._supports_array_parameter = _supports_sql_parameters(engine.name) def execute(self, sql, **kwargs): @@ -115,13 +118,11 @@ def execute(self, sql, **kwargs): :param kwargs: additional args to replace :return: the session result """ - import sqlalchemy - parsed = to_query(sql, self._supports_array_parameter, kwargs) _log.info("%s (%s)", parsed, kwargs) try: return self._session.execute(parsed, kwargs) - except sqlalchemy.exc.OperationalError as error: + except OperationalError as error: abort(408, error) def run(self, sql, **kwargs): @@ -154,7 +155,7 @@ def _destroy(self): if self._session: _log.info("removing session again") self._session.close() - self._session = None + self._session = None # type: ignore def __del__(self): self._destroy() @@ -194,7 +195,7 @@ def _normalize_columns(col): r["type"] = "number" elif isinstance(t, types.Enum): r["type"] = "categorical" - r["categories"] = sorted(t.enums, key=lambda s: s.lower()) + r["categories"] = sorted(t.enums, key=lambda s: s.lower()) # type: ignore return r return 
map(_normalize_columns, columns) @@ -235,7 +236,7 @@ def _handle_aggregated_score(base_view, config, replacements, args): return replacements -def prepare_arguments(view, config, replacements=None, arguments=None, extra_sql_argument=None): +def prepare_arguments(view, config, replacements=None, arguments: Optional[Dict] = None, extra_sql_argument=None): """ prepares for the given view the kwargs and replacements based on the given input :param view: db view @@ -267,10 +268,10 @@ def prepare_arguments(view, config, replacements=None, arguments=None, extra_sql parser = info.type if info and info.type is not None else lambda x: x try: if info and info.as_list: - vs = arguments.getlist(lookup_key) if hasattr(arguments, "getlist") else arguments.get(lookup_key) + vs: List[Any] = arguments.getlist(lookup_key) if hasattr(arguments, "getlist") else arguments.get(lookup_key) # type: ignore value = tuple([parser(v) for v in vs]) # multi values need to be a tuple not a list elif info and info.list_as_tuple: - vs = arguments.getlist(lookup_key) if hasattr(arguments, "getlist") else arguments.get(lookup_key, []) + vs = arguments.getlist(lookup_key) if hasattr(arguments, "getlist") else arguments.get(lookup_key, []) # type: ignore if len(vs) == 0: value = "(1, null)" else: @@ -346,7 +347,7 @@ def get_data( return query(engine, arguments, filters), view with session(engine) as sess: - if config.statement_timeout is not None: + if config.statement_timeout and config.statement_timeout_query: _log.info("set statement_timeout to {}".format(config.statement_timeout)) sess.execute(config.statement_timeout_query.format(config.statement_timeout)) r = sess.run(query.format(**replace), **kwargs) @@ -434,7 +435,7 @@ def get_count(database, view_name, args): return count_query(engine, processed_args, where_clause) with session(engine) as sess: - if config.statement_timeout is not None: + if config.statement_timeout and config.statement_timeout_query: _log.info("set statement_timeout to 
{}".format(config.statement_timeout)) sess.execute(config.statement_timeout_query.format(config.statement_timeout)) r = sess.run(count_query.format(**replace), **kwargs) diff --git a/tdp_core/dbmanager.py b/tdp_core/dbmanager.py index 29b8bd05f..594c45bde 100644 --- a/tdp_core/dbmanager.py +++ b/tdp_core/dbmanager.py @@ -1,5 +1,5 @@ import logging -from typing import Dict, Union +from typing import Any, Dict, Union from fastapi import FastAPI from sqlalchemy.engine import Engine @@ -26,7 +26,7 @@ def init_app(self, app: FastAPI): app.add_middleware(CloseWebSessionsMiddleware) for p in manager.registry.list("tdp-sql-database-definition"): - config = manager.settings.get_nested(p.configKey) + config: Dict[str, Any] = manager.settings.get_nested(p.configKey) # type: ignore connector: DBConnector = p.load().factory() if not connector.dburl: connector.dburl = config["dburl"] @@ -38,7 +38,7 @@ def init_app(self, app: FastAPI): _log.critical( "no db url defined for %s at config key %s - is your configuration up to date?", p.id, - p.configKey, + p.configKey, # type: ignore ) continue diff --git a/tdp_core/dbmigration/env.py b/tdp_core/dbmigration/env.py index b06c2fe0c..a6a3b1da6 100644 --- a/tdp_core/dbmigration/env.py +++ b/tdp_core/dbmigration/env.py @@ -39,7 +39,7 @@ def run_migrations_online(): additional_configuration["version_table_schema"] = version_table_schema connectable = engine_from_config( - config.get_section(config.config_ini_section), + config.get_section(config.config_ini_section), # type: ignore prefix="sqlalchemy.", poolclass=pool.NullPool, ) diff --git a/tdp_core/dbmigration/manager.py b/tdp_core/dbmigration/manager.py index 3da6c0909..454af2e3f 100644 --- a/tdp_core/dbmigration/manager.py +++ b/tdp_core/dbmigration/manager.py @@ -2,10 +2,11 @@ import re from argparse import REMAINDER from os import path -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional import alembic.command import alembic.config +from 
alembic.util.exc import CommandError from fastapi import FastAPI from .. import manager @@ -29,7 +30,7 @@ def __init__( script_location: str, *, auto_upgrade: bool = False, - version_table_schema: str = None, + version_table_schema: Optional[str] = None, ): """ Initializes a new migration object and optionally carries out an upgrade. @@ -59,7 +60,7 @@ def __init__( self.execute(["upgrade", "head"]) _log.info(f"Successfully upgraded database {self.id}") # As alembic is actually a commandline tool, it sometimes uses sys.exit (https://github.com/sqlalchemy/alembic/blob/master/alembic/util/messaging.py#L63) - except (SystemExit, alembic.util.exc.CommandError): + except (SystemExit, CommandError): _log.exception(f"Error upgrading database {self.id}") def __repr__(self) -> str: @@ -90,11 +91,11 @@ def get_custom_command(self, arguments: List[str] = []) -> Optional[List[str]]: """ if arguments: # Join the list with spaces - arguments = " ".join(arguments) + arguments_str = " ".join(arguments) # For all the command patterns we have .. for key, value in self.custom_commands.items(): # .. 
check if we can match the command pattern with the given string - matched = re.match(f"{key}$", arguments) + matched = re.match(f"{key}$", arguments_str) if matched: # If we have a match, call format with the captured groups and split by ' ' return value.format(*matched.groups()).split(" ") @@ -162,8 +163,11 @@ def init_app(self, app: FastAPI, plugins: List[AExtensionDesc] = []): for p in plugins: _log.info("DBMigration found: %s", p.id) + # TODO: The AExtensionDesc doesn't have any typing information, so we need to cast it to Any here + p: Any = p + # Check if configKey is set, otherwise use the plugin configuration - config = manager.settings.get_nested(p.configKey, {}) if hasattr(p, "configKey") else {} + config: dict = manager.settings.get_nested(p.configKey, {}) if hasattr(p, "configKey") else {} # type: ignore # Priority of assignments: Configuration File -> Plugin Definition id = config.get("id") or (p.id if hasattr(p, "id") else None) @@ -211,10 +215,10 @@ def init_app(self, app: FastAPI, plugins: List[AExtensionDesc] = []): # Create new migration migration = DBMigration( - id, - db_url, - script_location, - auto_upgrade=auto_upgrade, + id, # type: ignore + db_url, # type: ignore + script_location, # type: ignore + auto_upgrade=auto_upgrade, # type: ignore version_table_schema=version_table_schema, ) @@ -276,6 +280,6 @@ def execute(args): # Using REMAINDER as nargs causes the argument to be be optional, but '+' does not work because it also parses additional --attr with the parser which should actually be ignored. 
# Therefore, args.command might be empty and we simply pass None to trigger the error message - manager.db_migration[args.id].execute(args.command if len(args.command) > 0 else None) + manager.db_migration[args.id].execute(args.command if len(args.command) > 0 else None) # type: ignore return lambda args: lambda: execute(args) diff --git a/tdp_core/dbview.py b/tdp_core/dbview.py index 8e3b3c703..616e84beb 100644 --- a/tdp_core/dbview.py +++ b/tdp_core/dbview.py @@ -1,6 +1,7 @@ import logging import re from collections import OrderedDict +from typing import Any, Optional import sqlalchemy from sqlalchemy.engine import Engine @@ -49,7 +50,7 @@ def __init__(self, idtype=None, query=None): self.query = query self.queries = {} self.columns = OrderedDict() - self.columns_filled_up = None + self.columns_filled_up = False self.replacements = [] self.valid_replacements = {} self.arguments = [] @@ -65,7 +66,7 @@ def needs_to_fill_up_columns(self): def dump(self, name): from collections import OrderedDict - r = OrderedDict(name=name, description=self.description, type=self.query_type) + r: OrderedDict[str, Any] = OrderedDict(name=name, description=self.description, type=self.query_type) r["idType"] = self.idtype r["query"] = clean_query(self.query) args = [a for a in self.arguments] @@ -609,10 +610,10 @@ def __init__(self, views, agg_score=None, mappings=None): """ self.agg_score = agg_score or default_agg_score self.views = views - self.dburl = None + self.dburl: str = None # type: ignore self.mappings = mappings self.statement_timeout = None - self.statement_timeout_query = None + self.statement_timeout_query: Optional[str] = None self.description = "" def dump(self, name): @@ -621,13 +622,16 @@ def dump(self, name): def create_engine(self, config) -> Engine: engine_options = config.get("engine", {}) engine = sqlalchemy.create_engine(self.dburl, **engine_options) - # Assuming that gevent monkey patched the builtin - # threading library, we're likely good to use - # 
SQLAlchemy's QueuePool, which is the default - # pool class. However, we need to make it use - # threadlocal connections - # https://github.com/kljensen/async-flask-sqlalchemy-example/blob/master/server.py - engine.pool._use_threadlocal = True + try: + # Assuming that gevent monkey patched the builtin + # threading library, we're likely good to use + # SQLAlchemy's QueuePool, which is the default + # pool class. However, we need to make it use + # threadlocal connections + # https://github.com/kljensen/async-flask-sqlalchemy-example/blob/master/server.py + engine.pool._use_threadlocal = True # type: ignore + except Exception: + pass return engine diff --git a/tdp_core/encoder/json_encoder.py b/tdp_core/encoder/json_encoder.py index 7f5862dc5..b9910f410 100644 --- a/tdp_core/encoder/json_encoder.py +++ b/tdp_core/encoder/json_encoder.py @@ -2,17 +2,17 @@ import decimal from builtins import object, range -import numpy as np -import numpy.ma as ma +import numpy as np # type: ignore +import numpy.ma as ma # type: ignore class NumpyTablesEncoder(object): def __contains__(self, obj): - if isinstance(obj, np.ndarray): + if isinstance(obj, np.ndarray): # type: ignore return True if isinstance(obj, bytes): return True - if isinstance(obj, np.generic): + if isinstance(obj, np.generic): # type: ignore return True if isinstance(obj, dt.datetime): return True @@ -21,14 +21,14 @@ def __contains__(self, obj): return False def __call__(self, obj, base_encoder): - if isinstance(obj, np.ndarray): + if isinstance(obj, np.ndarray): # type: ignore if obj.ndim == 1: return [base_encoder.default(x) for x in obj] else: return [base_encoder.default(obj[i]) for i in range(obj.shape[0])] - if isinstance(obj, np.generic): - a = np.asscalar(obj) - if (isinstance(a, float) and np.isnan(a)) or ma.is_masked(a): + if isinstance(obj, np.generic): # type: ignore + a = np.asscalar(obj) # type: ignore + if (isinstance(a, float) and np.isnan(a)) or ma.is_masked(a): # type: ignore return None return a 
if isinstance(obj, dt.datetime): diff --git a/tdp_core/formatter.py b/tdp_core/formatter.py index a53f884d3..0bc9f209d 100644 --- a/tdp_core/formatter.py +++ b/tdp_core/formatter.py @@ -1,10 +1,14 @@ -from flask import Response, jsonify, request +from flask import jsonify, request +from flask.wrappers import Response def _format_csv(array_of_dicts): import io - import pandas as pd + try: + import pandas as pd # type: ignore + except ImportError: + raise ImportError("pandas is required to format as csv") if not array_of_dicts: return Response("", mimetype="text/csv") diff --git a/tdp_core/graph.py b/tdp_core/graph.py index 31b303f2a..a76338798 100644 --- a/tdp_core/graph.py +++ b/tdp_core/graph.py @@ -34,7 +34,7 @@ def create(data, user, id, db): import datetime - entry = dict( + entry: dict[str, int | datetime.datetime | str] = dict( name=data["name"], description=data.get("description", ""), creator=user.name, @@ -130,7 +130,8 @@ def remove_node(self, id): return False if self._nodes: n = self.get_node(id) - self._nodes.remove(n) + if n: + self._nodes.remove(n) self._entry["nnodes"] -= 1 # remove node and all associated edges self._db.graph_data.update(self._find_data, {"$pull": dict(nodes=dict(id=id))}, multi=False) @@ -216,7 +217,8 @@ def remove_edge(self, id): return False if self._edges: n = self.get_edge(id) - self._edges.remove(n) + if n: + self._edges.remove(n) self._entry["nedges"] -= 1 self._db.graph.update(self._find_me, {"$inc": dict(nedges=-1)}) self._db.graph_data.update(self._find_data, {"$pull": dict(edges=dict(id=id))}) diff --git a/tdp_core/id_mapping/manager.py b/tdp_core/id_mapping/manager.py index 765529df4..e3d0f573a 100644 --- a/tdp_core/id_mapping/manager.py +++ b/tdp_core/id_mapping/manager.py @@ -70,7 +70,7 @@ def __find_all_paths(self, graph, start, end, path=[]): paths.append(newpath) return sorted(paths, key=len) - def __resolve_single(self, from_idtype, to_idtype, ids): + def __resolve_single(self, from_idtype, to_idtype, ids) -> list: 
from_mappings = self.mappers.get(from_idtype, {}) to_mappings = from_mappings.get(to_idtype, []) if not to_mappings: @@ -125,7 +125,7 @@ def can_map(self, from_idtype, to_idtype): def maps_to(self, from_idtype): return list(self.paths.get(from_idtype, {}).keys()) - def __call__(self, from_idtype, to_idtype, ids): + def __call__(self, from_idtype, to_idtype, ids) -> list: # If both id types are the same, simply return if from_idtype == to_idtype: return ids @@ -162,12 +162,12 @@ def __call__(self, from_idtype, to_idtype, ids): return result # Otherwise, check if every mapping was 1 to 1 - lengths = [len(x) for x in result] + lengths = [len(x) for x in result] # type: ignore # If any result array is longer than 1, we need to flatten and later merge it needs_merging = max(lengths, default=0) > 1 # Flatten result and assign to values values = list(chain.from_iterable(result)) - return result + return result # type: ignore def search(self, from_idtype, to_idtype, query, max_results=None): """ diff --git a/tdp_core/manager.py b/tdp_core/manager.py index 20e2bd61b..926d733e4 100644 --- a/tdp_core/manager.py +++ b/tdp_core/manager.py @@ -13,9 +13,9 @@ from .settings.model import GlobalSettings -db: "DBManager" = None -db_migration: "DBMigrationManager" = None -id_mapping: "MappingManager" = None -security: "SecurityManager" = None -registry: "Registry" = None -settings: "GlobalSettings" = None +db: "DBManager" = None # type: ignore +db_migration: "DBMigrationManager" = None # type: ignore +id_mapping: "MappingManager" = None # type: ignore +security: "SecurityManager" = None # type: ignore +registry: "Registry" = None # type: ignore +settings: "GlobalSettings" = None # type: ignore diff --git a/tdp_core/middleware/request_context_middleware.py b/tdp_core/middleware/request_context_middleware.py index 2ea5d3557..d102332d6 100644 --- a/tdp_core/middleware/request_context_middleware.py +++ b/tdp_core/middleware/request_context_middleware.py @@ -1,15 +1,16 @@ from contextvars 
import ContextVar +from typing import Optional from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint from starlette.requests import Request REQUEST_CTX_KEY = "fastapi_request" -_request_ctx_var: ContextVar[str] = ContextVar(REQUEST_CTX_KEY, default=None) +_request_ctx_var: ContextVar[Optional[Request]] = ContextVar(REQUEST_CTX_KEY, default=None) def get_request() -> Request: - return _request_ctx_var.get() + return _request_ctx_var.get() # type: ignore TODO: It is None in non-request context class RequestContextMiddleware(BaseHTTPMiddleware): diff --git a/tdp_core/mol_img/img_api.py b/tdp_core/mol_img/img_api.py index b7e0999d0..cb3f082db 100644 --- a/tdp_core/mol_img/img_api.py +++ b/tdp_core/mol_img/img_api.py @@ -1,7 +1,7 @@ from typing import List, Optional, Set from fastapi import APIRouter -from rdkit.Chem import Mol +from rdkit.Chem import Mol # type: ignore from rdkit.Chem.Scaffolds import MurckoScaffold from starlette.responses import Response from starlette.status import HTTP_204_NO_CONTENT diff --git a/tdp_core/mol_img/models.py b/tdp_core/mol_img/models.py index f4aa04065..21bd3cf6a 100644 --- a/tdp_core/mol_img/models.py +++ b/tdp_core/mol_img/models.py @@ -1,7 +1,7 @@ from typing import Dict, Optional from pydantic import BaseModel -from rdkit.Chem import Mol, MolFromSmarts, MolFromSmiles +from rdkit.Chem import Mol, MolFromSmarts, MolFromSmiles # type: ignore from starlette.responses import Response diff --git a/tdp_core/mol_img/util/draw.py b/tdp_core/mol_img/util/draw.py index 2d72fabb0..2dcf28fc8 100644 --- a/tdp_core/mol_img/util/draw.py +++ b/tdp_core/mol_img/util/draw.py @@ -1,12 +1,12 @@ from typing import Callable -from rdkit.Chem import Mol +from rdkit.Chem import Mol # type: ignore from rdkit.Chem.Draw import SimilarityMaps, rdMolDraw2D from rdkit.Chem.Draw.rdMolDraw2D import MolDraw2DSVG from rdkit.Chem.Draw.SimilarityMaps import GetSimilarityMapForFingerprint -def _draw_wrapper(draw_inner: 
Callable[[MolDraw2DSVG, ...], None]) -> Callable[..., str]: +def _draw_wrapper(draw_inner: Callable[[MolDraw2DSVG, ...], None]) -> Callable[..., str]: # type: ignore """Function wrapper for drawing Can annotate any function that takes a drawer as first arg, ignores its return type diff --git a/tdp_core/mol_img/util/molecule.py b/tdp_core/mol_img/util/molecule.py index d1781016d..3b2a67ba8 100644 --- a/tdp_core/mol_img/util/molecule.py +++ b/tdp_core/mol_img/util/molecule.py @@ -1,6 +1,6 @@ from typing import List, Optional -from rdkit.Chem import Mol, TemplateAlign, rdFMCS +from rdkit.Chem import Mol, TemplateAlign, rdFMCS # type: ignore def maximum_common_substructure_query_mol(mols: List[Mol]) -> Optional[Mol]: diff --git a/tdp_core/plugin/model.py b/tdp_core/plugin/model.py index a9ed32e77..95a7c34a6 100644 --- a/tdp_core/plugin/model.py +++ b/tdp_core/plugin/model.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Any, Dict, Type +from typing import Any, Dict, Optional, Type from fastapi import FastAPI from pydantic import BaseModel @@ -13,8 +13,8 @@ def __init__(self, plugin): def __iter__(self): return iter(self._items) - def append(self, type_: str, id_: str, module_: Any, desc: Dict[str, Any] = None): - desc = {} if desc is None else desc + def append(self, type_: str, id_: str, module_: Any, desc: Dict[str, Any] = {}): + desc = {} if not desc else desc desc["type"] = type_ desc["id"] = id_ desc["module"] = module_ @@ -35,5 +35,5 @@ def init_app(self, app: FastAPI): pass @property - def setting_class(self) -> Type[BaseModel]: + def setting_class(self) -> Optional[Type[BaseModel]]: return None diff --git a/tdp_core/plugin/parser.py b/tdp_core/plugin/parser.py index cb2d869df..f6b4b77d7 100644 --- a/tdp_core/plugin/parser.py +++ b/tdp_core/plugin/parser.py @@ -50,7 +50,7 @@ def __init__(self, entry_point: EntryPoint): self.name = self.id self.title = self.name self.description = "" - self.version = entry_point.dist.version + 
self.version = entry_point.dist.version if entry_point.dist else "0.0.0" self.extensions = [] @staticmethod @@ -115,7 +115,7 @@ def get_config_from_plugins(plugins: List[EntryPointPlugin]) -> Tuple[List[Dict[ # Load the class of the config and wrap it in a tuple like (, ...), # such that pydantic can use it as type-hint in the create_model class. # Otherwise, it would except to be the default value... - models[plugin.id] = (plugin_settings_model, ...) + models[plugin.id] = (plugin_settings_model, ...) # type: ignore # TODO: Currently we append an empty object as "default", but we should actually pass an instance of the settings model instead. files.append({f"{plugin.id}": {}}) diff --git a/tdp_core/plugin/registry.py b/tdp_core/plugin/registry.py index 2d52df6aa..66c67bf21 100644 --- a/tdp_core/plugin/registry.py +++ b/tdp_core/plugin/registry.py @@ -92,7 +92,7 @@ def __getitem__(self, item): def __iter__(self): return iter(self._extensions) - def list(self, plugin_type=None): + def list(self, plugin_type=None) -> List[ExtensionDesc] | "Registry": if plugin_type is None: return self if not hasattr(plugin_type, "__call__"): # not a callable diff --git a/tdp_core/proxy.py b/tdp_core/proxy.py index 7ca99c72a..1bf05d79a 100644 --- a/tdp_core/proxy.py +++ b/tdp_core/proxy.py @@ -1,7 +1,8 @@ import logging import requests -from flask import Flask, Response, abort, request +from flask import Flask, abort, request +from flask.wrappers import Response from . 
import manager @@ -15,7 +16,7 @@ def _to_site_url(site): for p in proxy_defs: if p.id == site: headers = getattr(p, "headers") if hasattr(p, "headers") else dict() - return p.url.format(**request.args.to_dict()), headers + return p.url.format(**request.args.to_dict()), headers # type: ignore # none matching found return None, None diff --git a/tdp_core/security/manager.py b/tdp_core/security/manager.py index 8e7406a03..3a26a358a 100644 --- a/tdp_core/security/manager.py +++ b/tdp_core/security/manager.py @@ -44,10 +44,10 @@ def access_token_to_payload(token: str) -> Dict: return jwt.decode(token, manager.settings.secret_key, algorithms=[manager.settings.jwt_algorithm]) -def access_token_to_user(token: str) -> User: +def access_token_to_user(token: str) -> Optional[User]: payload = access_token_to_payload(token) - username: str = payload.get("sub") - if username is None: + username: Optional[str] = payload.get("sub") + if not username: return None return User(id=username, access_token=token, roles=payload.get("roles", [])) @@ -91,12 +91,16 @@ def logout(self): u = self.current_user response_payload = {} response_cookies = [] + + if not u: + return response_payload, response_cookies + for store in self.user_stores: customizations = store.logout(u) or LogoutReturnValue() # data is an arbitrary Dict which is added to the response payload. - response_payload.update(customizations.data) + response_payload.update(customizations.data or {}) # cookies is a list of Dicts which are passed 1:1 to response.set_cookie. 
- response_cookies.extend(customizations.cookies) + response_cookies.extend(customizations.cookies or []) return response_payload, response_cookies def _delegate_stores_until_not_none(self, store_method_name: str, *args): @@ -244,7 +248,7 @@ def is_logged_in(): def current_username(): u = manager.security.current_user - return u.name if hasattr(u, "name") else ANONYMOUS_USER.name + return u.name if u and hasattr(u, "name") else ANONYMOUS_USER.name def current_user(): @@ -258,7 +262,7 @@ def login_required(f=None, *, users=(), roles=()): """Usage: @login_required or @login_required(users=("admin") or @login_required(roles=("admin"))""" def login_required_inner(fn=None): - @wraps(fn) + @wraps(fn) # type: ignore def decorator(*args, **kwargs): u = manager.security.current_user # Allow access only if a user is available @@ -270,7 +274,7 @@ def decorator(*args, **kwargs): raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="User role not in allowed_roles in login_required request" ) - return fn(*args, **kwargs) + return fn(*args, **kwargs) # type: ignore return decorator diff --git a/tdp_core/security/permissions.py b/tdp_core/security/permissions.py index cfec33264..25e553904 100644 --- a/tdp_core/security/permissions.py +++ b/tdp_core/security/permissions.py @@ -1,3 +1,5 @@ +from typing import Optional + from .manager import current_user from .model import ANONYMOUS_USER, User @@ -66,7 +68,7 @@ def _includes(items, item): return False -def can(item, permission: int, user: User = None): +def can(item, permission: int, user: Optional[User] = None): if user is None: user = current_user() @@ -96,13 +98,13 @@ def can(item, permission: int, user: User = None): return permission in others -def can_read(data_description, user=None): +def can_read(data_description, user: Optional[User] = None): return can(data_description, PERMISSION_READ, user) -def can_write(data_description, user=None): +def can_write(data_description, user: Optional[User] = None): return 
can(data_description, PERMISSION_WRITE, user) -def can_execute(data_description, user=None): +def can_execute(data_description, user: Optional[User] = None): return can(data_description, PERMISSION_EXECUTE, user) diff --git a/tdp_core/security/store/dummy_store.py b/tdp_core/security/store/dummy_store.py index f95497fb5..00ffeb80c 100644 --- a/tdp_core/security/store/dummy_store.py +++ b/tdp_core/security/store/dummy_store.py @@ -26,7 +26,6 @@ def __init__(self): self._users = [ DummyUser( id=v["name"], - name=v["name"], roles=v["roles"], password=v["password"], salt=v["salt"], diff --git a/tdp_core/server/mainapp.py b/tdp_core/server/mainapp.py index b25eabc54..c342d6473 100644 --- a/tdp_core/server/mainapp.py +++ b/tdp_core/server/mainapp.py @@ -3,7 +3,8 @@ import re from builtins import next -from flask import Flask, safe_join, send_from_directory +from flask import Flask, send_from_directory +from werkzeug.security import safe_join from .. import manager @@ -37,7 +38,7 @@ def _deliver(path): return "This page does not exist", 404 # serve public - if os.path.exists(safe_join(public_dir, path)): + if os.path.exists(safe_join(public_dir, path)): # type: ignore return send_from_directory(public_dir, path) # check all plugins @@ -49,11 +50,11 @@ def _deliver(path): plugin = next((p for p in manager.registry.plugins if p.id == plugin_id), None) if plugin: - dpath = safe_join(plugin.folder, "/".join(elems)) - if os.path.exists(dpath): + dpath = safe_join(plugin.folder, "/".join(elems)) # type: ignore + if os.path.exists(dpath): # type: ignore # send_static_file will guess the correct MIME type # print 'sending',dpath - return send_from_directory(plugin.folder, "/".join(elems)) + return send_from_directory(plugin.folder, "/".join(elems)) # type: ignore return "This page does not exist", 404 @@ -92,11 +93,11 @@ def _generate_index(): + "" ) text.append('") text.append("") @@ -128,9 +129,9 @@ def build_info(): for p in manager.registry.plugins: if p.id == "tdp_core": - 
build_info["name"] = p.name - build_info["version"] = p.version - build_info["resolved"] = p.resolved + build_info["name"] = p.name # type: ignore + build_info["version"] = p.version # type: ignore + build_info["resolved"] = p.resolved # type: ignore else: desc = dict(name=p.name, version=p.version, resolved=p.resolved) all_plugins.append(desc) diff --git a/tdp_core/server/utils.py b/tdp_core/server/utils.py index 9450fc302..7285356b1 100644 --- a/tdp_core/server/utils.py +++ b/tdp_core/server/utils.py @@ -2,9 +2,10 @@ import logging import time import traceback +from typing import Optional -import werkzeug from flask import Flask, jsonify +from werkzeug.exceptions import HTTPException from .. import manager @@ -28,13 +29,13 @@ def init_legacy_app(app: Flask): if manager.settings.tdp_core: app.config["SECRET_KEY"] = manager.settings.secret_key - @app.errorhandler(werkzeug.exceptions.HTTPException) - @app.errorhandler(Exception) + @app.errorhandler(HTTPException) + @app.errorhandler(Exception) # type: ignore async def handle_exception(e): """Handles Flask exceptions by returning the same JSON response as FastAPI#HTTPException would.""" _log.exception("An error occurred in Flask") # Extract status information if a Flask#HTTPException is given, otherwise return 500 with exception information - status_code = e.code if isinstance(e, werkzeug.exceptions.HTTPException) else 500 + status_code = e.code if isinstance(e, HTTPException) else 500 detail = detail_from_exception(e) # Exact same response as the one from FastAPI#HTTPException. 
return jsonify({"detail": detail or http.HTTPStatus(status_code).phrase}), status_code @@ -63,7 +64,7 @@ def load_after_server_started_hooks(): _log.info("Elapsed time for server startup hooks: %d seconds", time.time() - start) -def detail_from_exception(e: Exception) -> str: +def detail_from_exception(e: Exception) -> Optional[str]: """Returns the full stacktrace in development mode and just the error message in production mode.""" # Always return full stacktrace in development mode if manager.settings.is_development_mode: @@ -71,7 +72,7 @@ def detail_from_exception(e: Exception) -> str: traceback.format_exception(None, e, e.__traceback__) ) # Exception specific returns - if isinstance(e, werkzeug.exceptions.HTTPException): + if isinstance(e, HTTPException): return e.description # Fallback to the string representation of the exception return str(e) diff --git a/tdp_core/server/visyn_server.py b/tdp_core/server/visyn_server.py index 281adfb5e..3a0239657 100644 --- a/tdp_core/server/visyn_server.py +++ b/tdp_core/server/visyn_server.py @@ -2,7 +2,7 @@ import logging.config import sys import threading -from typing import Dict, Optional +from typing import Any, Dict, Optional from fastapi import FastAPI from fastapi.middleware.wsgi import WSGIMiddleware @@ -16,7 +16,7 @@ def create_visyn_server( - *, fast_api_args: Optional[Dict] = {}, start_cmd: Optional[str] = None, workspace_config: Optional[Dict] = None + *, fast_api_args: Dict[str, Any] = {}, start_cmd: Optional[str] = None, workspace_config: Optional[Dict] = None ) -> FastAPI: """ Create a new FastAPI instance while ensuring that the configuration and plugins are loaded, extension points are registered, database migrations are executed, ... 
@@ -84,7 +84,7 @@ def create_visyn_server( from ..dbmigration.manager import DBMigrationManager app.state.db_migration = manager.db_migration = DBMigrationManager() - manager.db_migration.init_app(app, manager.registry.list("tdp-sql-database-migration")) + manager.db_migration.init_app(app, manager.registry.list("tdp-sql-database-migration")) # type: ignore from ..security.manager import create_security_manager @@ -112,8 +112,13 @@ def create_visyn_server( namespace_plugins = manager.registry.list("namespace") _log.info(f"Registering {len(namespace_plugins)} legacy namespaces via WSGIMiddleware") for p in namespace_plugins: - _log.info(f"Registering legacy namespace: {p.namespace}") - app.mount(p.namespace, WSGIMiddleware(init_legacy_app(p.load().factory()))) + namespace = p.namespace # type: ignore + _log.info(f"Registering legacy namespace: {namespace}") + + sub_app = p.load().factory() + init_legacy_app(sub_app) + + app.mount(namespace, WSGIMiddleware(sub_app)) # Load all FastAPI apis router_plugins = manager.registry.list("fastapi_router") @@ -142,7 +147,7 @@ def create_visyn_server( app.add_middleware(RequestContextMiddleware) # TODO: Move up? 
- app.add_api_route("/health", health) - app.add_api_route("/api/buildInfo.json", build_info) + app.add_api_route("/health", health) # type: ignore + app.add_api_route("/api/buildInfo.json", build_info) # type: ignore return app diff --git a/tdp_core/settings/model.py b/tdp_core/settings/model.py index 18bfbaf21..375dfb72f 100644 --- a/tdp_core/settings/model.py +++ b/tdp_core/settings/model.py @@ -100,11 +100,11 @@ class GlobalSettings(BaseSettings): jwt_header_name: str = "Authorization" jwt_header_type: str = "Bearer" jwt_cookie_secure: bool = False - jwt_cookie_samesite: str = "Strict" + jwt_cookie_samesite: Optional[Literal["lax", "strict", "none"]] = "strict" jwt_access_cookie_path: str = "/" # General settings for tdp_core - tdp_core: TDPCoreSettings = TDPCoreSettings() + tdp_core: TDPCoreSettings = TDPCoreSettings() # type: ignore @property def is_development_mode(self) -> bool: diff --git a/tdp_core/settings/router.py b/tdp_core/settings/router.py index a65b0e0a9..93c4fd11f 100644 --- a/tdp_core/settings/router.py +++ b/tdp_core/settings/router.py @@ -8,17 +8,17 @@ @router.get("/{path:path}") def get_config_path(path: str): - path = path.split("/") - key = path[0] + split_path = path.split("/") + key = split_path[0] plugin = next((p for p in manager.registry.list("tdp-config-safe-keys") if p.id == key), None) if plugin is None: raise HTTPException(status_code=404, detail=f'config key "{key}" not found') - path[0] = plugin.configKey + split_path[0] = plugin.configKey # type: ignore - return manager.settings.get_nested(".".join(path)) + return manager.settings.get_nested(".".join(split_path)) def create(): diff --git a/tdp_core/settings/utils.py b/tdp_core/settings/utils.py index 7cfc07e62..ca8af878e 100644 --- a/tdp_core/settings/utils.py +++ b/tdp_core/settings/utils.py @@ -27,4 +27,4 @@ def load_config_file(path: str) -> Dict[str, Any]: Opens any `*.json` file and loads it via `jsoncfg.loads`. 
""" with codecs.open(path, "r", "utf-8") as fi: - return jsoncfg.loads(fi.read()) + return jsoncfg.loads(fi.read()) or {} diff --git a/tdp_core/sql_use_gevent.py b/tdp_core/sql_use_gevent.py deleted file mode 100644 index b1834822a..000000000 --- a/tdp_core/sql_use_gevent.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -based on http://www.jasonamyers.com/gevent-postgres-sqlalchemy -try to parallelize psycopg2 for gevent -""" -import logging - -_log = logging.getLogger(__name__) - - -def make_psycopg_green(): - """Configure Psycopg to be used with gevent in non-blocking way.""" - if not hasattr(extensions, "set_wait_callback"): - raise ImportError("support for coroutines not available in this Psycopg version (%s)" % psycopg2.__version__) - - extensions.set_wait_callback(gevent_wait_callback) - - -def gevent_wait_callback(conn, timeout=None): - """A wait callback useful to allow gevent to work with Psycopg.""" - while True: - state = conn.poll() - if state == extensions.POLL_OK: - break - elif state == extensions.POLL_READ: - wait_read(conn.fileno(), timeout=timeout) - elif state == extensions.POLL_WRITE: - wait_write(conn.fileno(), timeout=timeout) - else: - raise psycopg2.OperationalError("Bad result from poll: %r" % state) - - -try: - import psycopg2 - from gevent.socket import wait_read, wait_write - from psycopg2 import extensions - - _log.info("patching psycopg2 to be green") - make_psycopg_green() -except ImportError: - pass # nothing to do - - -def create(): - pass diff --git a/tdp_core/storage.py b/tdp_core/storage.py index 85798b0c3..e91a50a90 100644 --- a/tdp_core/storage.py +++ b/tdp_core/storage.py @@ -15,7 +15,7 @@ app = Flask(__name__) -@app.route("/namedsets/", methods=["GET", "POST"]) +@app.route("/namedsets/", methods=["GET", "POST"]) # type: ignore @etag def list_namedset(): db = MongoClient(c.host, c.port)[c.db_namedsets] @@ -52,7 +52,7 @@ def list_namedset(): return jsonify(entry) -@app.route("/namedset/", methods=["GET", "DELETE", "PUT"]) 
+@app.route("/namedset/", methods=["GET", "DELETE", "PUT"]) # type: ignore @etag def get_namedset(namedset_id): db = MongoClient(c.host, c.port)[c.db_namedsets] @@ -128,7 +128,7 @@ def post_attachment(): return id -@app.route("/attachment/", methods=["GET", "DELETE", "PUT"]) +@app.route("/attachment/", methods=["GET", "DELETE", "PUT"]) # type: ignore @etag def get_attachment(attachment_id): db = MongoClient(c.host, c.port)[c.db_namedsets] diff --git a/tdp_core/swagger.py b/tdp_core/swagger.py index dd46a6c60..7f067b738 100644 --- a/tdp_core/swagger.py +++ b/tdp_core/swagger.py @@ -1,8 +1,10 @@ import json import logging from collections import OrderedDict +from typing import Any -from flask import Flask, Response, render_template +from flask import Flask, render_template +from flask.wrappers import Response from jinja2 import Template from . import db, manager @@ -22,7 +24,7 @@ def _gen(): here = path.abspath(path.dirname(__file__)) files = [path.join(here, "swagger", p) for p in ["swagger.yml", "db.yml"]] # , 'proxy.yml', 'storage.yml']] - base = yaml_load(files) + base: dict[str, Any] = yaml_load(files) # type: ignore base["paths"] = OrderedDict(sorted(base["paths"].items(), key=lambda t: t[0])) with io.open(path.join(here, "swagger", "view.tmpl.yml"), "r", encoding="utf-8") as f: @@ -149,7 +151,7 @@ def to_type(t): view_yaml = template.render(**keys) # _log.info(view_yaml) part = safe_load(view_yaml) - base = data_merge(base, part) + base = data_merge(base, part) # type: ignore # post process using extensions for p in manager.registry.list("tdp-swagger-postprocessor"): diff --git a/tdp_core/tests/fixtures/postgres_db.py b/tdp_core/tests/fixtures/postgres_db.py index eb4e1bac0..4cb40a934 100644 --- a/tdp_core/tests/fixtures/postgres_db.py +++ b/tdp_core/tests/fixtures/postgres_db.py @@ -1,3 +1,5 @@ +from typing import Generator + import pytest from pytest_postgresql.executor import PostgreSQLExecutor from pytest_postgresql.factories import postgresql_proc @@ 
-11,7 +13,7 @@ class PostgreSQLExecutorWithUrl(PostgreSQLExecutor): @pytest.fixture(scope="session") -def postgres_db(postgresql_proc) -> PostgreSQLExecutorWithUrl: +def postgres_db(postgresql_proc) -> Generator[PostgreSQLExecutorWithUrl, None, None]: d = postgresql_proc d.url = f"postgresql://{d.user}:{d.password}@{d.host}:{d.port}/{d.dbname}" janitor = DatabaseJanitor(d.user, d.host, d.port, d.dbname, d.version, d.password) diff --git a/tdp_core/tests/test_rdkit_img.py b/tdp_core/tests/test_rdkit_img.py index dee080027..30a837195 100644 --- a/tdp_core/tests/test_rdkit_img.py +++ b/tdp_core/tests/test_rdkit_img.py @@ -36,7 +36,7 @@ def test_invalid(client: TestClient, structure): def test_valid(client: TestClient, structure, expected): res = client.get("/api/rdkit/", params={"structure": structure}) assert res.status_code == 200 - assert res.headers.get("content-type").startswith("image/svg") + assert res.headers["content-type"].startswith("image/svg") hash_compare(res.content, expected) diff --git a/tdp_core/utils.py b/tdp_core/utils.py index a90d4d603..acbaf22f5 100644 --- a/tdp_core/utils.py +++ b/tdp_core/utils.py @@ -3,7 +3,8 @@ from builtins import range from typing import Union -from flask import Response, abort, make_response, request +from flask import abort, make_response, request +from flask.wrappers import Response from . 
import manager diff --git a/tdp_core/xlsx.py b/tdp_core/xlsx.py index ad097ce40..1abef7b27 100644 --- a/tdp_core/xlsx.py +++ b/tdp_core/xlsx.py @@ -3,7 +3,8 @@ from tempfile import NamedTemporaryFile import dateutil.parser -from flask import Flask, Response, abort, jsonify, request +from flask import Flask, abort, jsonify, request +from flask.wrappers import Response from openpyxl import Workbook, load_workbook from openpyxl.cell import WriteOnlyCell from openpyxl.styles import Font @@ -42,7 +43,7 @@ def _xlsx2json(): if not file: abort(403, "missing file") - wb = load_workbook(file, read_only=True, data_only=True) + wb = load_workbook(file, read_only=True, data_only=True) # type: ignore def convert_row(row, cols): result = {} @@ -78,7 +79,7 @@ def _xlsx2json_array(): if not file: abort(403, "missing file") - wb = load_workbook(file, read_only=True, data_only=True) + wb = load_workbook(file, read_only=True, data_only=True) # type: ignore def convert_row(row): return [_convert_value(cell.value) for cell in row] @@ -94,7 +95,7 @@ def convert_row(row): @app.route("/from_json", methods=["POST"]) def _json2xlsx(): - data = request.json + data: dict = request.json # type: ignore wb = Workbook(write_only=True) bold = Font(bold=True) @@ -102,9 +103,9 @@ def _json2xlsx(): def to_cell(v): # If the native value cannot be used as Excel value, used the stringified version instead. 
try: - return WriteOnlyCell(ws, value=v) + return WriteOnlyCell(ws, value=v) # type: ignore except ValueError: - return WriteOnlyCell(ws, value=str(v)) + return WriteOnlyCell(ws, value=str(v)) # type: ignore def to_header(v): c = to_cell(v) @@ -139,7 +140,7 @@ def to_value(v, coltype): @app.route("/from_json_array", methods=["POST"]) def _json_array2xlsx(): - data = request.json + data: list = request.json # type: ignore wb = Workbook(write_only=True) ws = wb.create_sheet() From 0c345518f38e33235a50aeddc1bd0d38d3dd534d Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Thu, 22 Dec 2022 14:02:04 +0100 Subject: [PATCH 02/13] Replace BaseHTTPMiddleware with much faster ASGI equalivalent --- requirements.txt | 1 + tdp_core/dbmanager.py | 12 ++++--- tdp_core/dbview.py | 16 ++------- .../close_web_sessions_middleware.py | 33 +++++++++++-------- .../exception_handler_middleware.py | 17 ++++++---- .../middleware/request_context_middleware.py | 21 ------------ tdp_core/middleware/request_context_plugin.py | 17 ++++++++++ tdp_core/security/manager.py | 25 +++++++------- tdp_core/server/visyn_server.py | 8 +++-- 9 files changed, 75 insertions(+), 75 deletions(-) delete mode 100644 tdp_core/middleware/request_context_middleware.py create mode 100644 tdp_core/middleware/request_context_plugin.py diff --git a/requirements.txt b/requirements.txt index cfa90326d..1697da90e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,6 +20,7 @@ python-multipart==0.0.5 rdkit==2022.3.5 requests==2.28.1 SQLAlchemy==1.4.45 +starlette-context==0.3.5 urllib3==1.26.9 uvicorn[standard]==0.20.0 yamlreader==3.0.4 diff --git a/tdp_core/dbmanager.py b/tdp_core/dbmanager.py index 594c45bde..6e2a9a30a 100644 --- a/tdp_core/dbmanager.py +++ b/tdp_core/dbmanager.py @@ -8,7 +8,7 @@ from . 
import manager from .dbview import DBConnector from .middleware.close_web_sessions_middleware import CloseWebSessionsMiddleware -from .middleware.request_context_middleware import get_request +from .middleware.request_context_plugin import get_request _log = logging.getLogger(__name__) @@ -27,7 +27,8 @@ def init_app(self, app: FastAPI): for p in manager.registry.list("tdp-sql-database-definition"): config: Dict[str, Any] = manager.settings.get_nested(p.configKey) # type: ignore - connector: DBConnector = p.load().factory() + # Only instantiate the connector if it has a module factory, otherwise use an empty one + connector: DBConnector = p.load().factory() if p.module else DBConnector() if not connector.dburl: connector.dburl = config["dburl"] if not connector.statement_timeout: @@ -93,11 +94,14 @@ def create_web_session(self, engine_or_id: Union[Engine, str]) -> Session: """ session = self.create_session(engine_or_id) + r = get_request() + if not r: + raise Exception("No request found, did you use a create_web_sesssion outside of a request?") try: - existing_sessions = get_request().state.db_sessions + existing_sessions = r.state.db_sessions except (KeyError, AttributeError): existing_sessions = [] - get_request().state.db_sessions = existing_sessions + r.state.db_sessions = existing_sessions existing_sessions.append(session) return session diff --git a/tdp_core/dbview.py b/tdp_core/dbview.py index 616e84beb..1dd7bf33e 100644 --- a/tdp_core/dbview.py +++ b/tdp_core/dbview.py @@ -602,7 +602,7 @@ class DBConnector(object): basic connector object """ - def __init__(self, views, agg_score=None, mappings=None): + def __init__(self, views={}, agg_score=None, mappings=None): """ :param views: the dict of query views :param agg_score: optional specify how aggregation should be handled @@ -621,19 +621,7 @@ def dump(self, name): def create_engine(self, config) -> Engine: engine_options = config.get("engine", {}) - engine = sqlalchemy.create_engine(self.dburl, 
**engine_options) - try: - # Assuming that gevent monkey patched the builtin - # threading library, we're likely good to use - # SQLAlchemy's QueuePool, which is the default - # pool class. However, we need to make it use - # threadlocal connections - # https://github.com/kljensen/async-flask-sqlalchemy-example/blob/master/server.py - engine.pool._use_threadlocal = True # type: ignore - except Exception: - pass - - return engine + return sqlalchemy.create_engine(self.dburl, pool_size=30, pool_pre_ping=True, **engine_options) def create_sessionmaker(self, engine) -> sessionmaker: return sessionmaker(bind=engine) diff --git a/tdp_core/middleware/close_web_sessions_middleware.py b/tdp_core/middleware/close_web_sessions_middleware.py index 068fb5e9d..c5ac1daed 100644 --- a/tdp_core/middleware/close_web_sessions_middleware.py +++ b/tdp_core/middleware/close_web_sessions_middleware.py @@ -1,18 +1,23 @@ -from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint -from starlette.requests import Request +from fastapi import FastAPI +from .request_context_plugin import get_request -class CloseWebSessionsMiddleware(BaseHTTPMiddleware): - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint): - response = await call_next(request) - try: - for db_session in request.state.db_sessions: - try: - db_session.close() - except Exception: - pass - except (KeyError, AttributeError): - pass +# Use basic ASGI middleware instead of BaseHTTPMiddleware as it is significantly faster: https://github.com/tiangolo/fastapi/issues/2696#issuecomment-768224643 +class CloseWebSessionsMiddleware: + def __init__(self, app: FastAPI): + self.app = app - return response + async def __call__(self, scope, receive, send): + await self.app(scope, receive, send) + + r = get_request() + if r: + try: + for db_session in r.state.db_sessions: + try: + db_session.close() + except Exception: + pass + except (KeyError, AttributeError): + pass diff --git 
a/tdp_core/middleware/exception_handler_middleware.py b/tdp_core/middleware/exception_handler_middleware.py index 31a24848e..66ee38c48 100644 --- a/tdp_core/middleware/exception_handler_middleware.py +++ b/tdp_core/middleware/exception_handler_middleware.py @@ -1,20 +1,23 @@ import logging -from fastapi import HTTPException +from fastapi import FastAPI, HTTPException from fastapi.exception_handlers import http_exception_handler -from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint -from starlette.requests import Request from ..server.utils import detail_from_exception +from .request_context_plugin import get_request -class ExceptionHandlerMiddleware(BaseHTTPMiddleware): - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint): +# Use basic ASGI middleware instead of BaseHTTPMiddleware as it is significantly faster: https://github.com/tiangolo/fastapi/issues/2696#issuecomment-768224643 +class ExceptionHandlerMiddleware: + def __init__(self, app: FastAPI): + self.app = app + + async def __call__(self, scope, receive, send): try: - return await call_next(request) + await self.app(scope, receive, send) except Exception as e: logging.exception("An error occurred in FastAPI") return await http_exception_handler( - request, + get_request(), # type: ignore e if isinstance(e, HTTPException) else HTTPException(status_code=500, detail=detail_from_exception(e)), ) diff --git a/tdp_core/middleware/request_context_middleware.py b/tdp_core/middleware/request_context_middleware.py deleted file mode 100644 index d102332d6..000000000 --- a/tdp_core/middleware/request_context_middleware.py +++ /dev/null @@ -1,21 +0,0 @@ -from contextvars import ContextVar -from typing import Optional - -from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint -from starlette.requests import Request - -REQUEST_CTX_KEY = "fastapi_request" - -_request_ctx_var: ContextVar[Optional[Request]] = ContextVar(REQUEST_CTX_KEY, 
default=None) - - -def get_request() -> Request: - return _request_ctx_var.get() # type: ignore TODO: It is None in non-request context - - -class RequestContextMiddleware(BaseHTTPMiddleware): - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint): - request_ctx_key = _request_ctx_var.set(request) - response = await call_next(request) - _request_ctx_var.reset(request_ctx_key) - return response diff --git a/tdp_core/middleware/request_context_plugin.py b/tdp_core/middleware/request_context_plugin.py new file mode 100644 index 000000000..ed2b65b33 --- /dev/null +++ b/tdp_core/middleware/request_context_plugin.py @@ -0,0 +1,17 @@ +from typing import Optional + +from starlette.requests import HTTPConnection, Request +from starlette_context import context +from starlette_context.plugins.base import Plugin + + +def get_request() -> Request | None: + return context.get("request") + + +class RequestContextPlugin(Plugin): + # The returned value will be inserted in the context with this key + key = "request" + + async def process_request(self, request: Request | HTTPConnection) -> Optional[Request | HTTPConnection]: + return request diff --git a/tdp_core/security/manager.py b/tdp_core/security/manager.py index 3a26a358a..5af45a013 100644 --- a/tdp_core/security/manager.py +++ b/tdp_core/security/manager.py @@ -9,7 +9,7 @@ from fastapi.security.utils import get_authorization_scheme_param from .. 
import manager -from ..middleware.request_context_middleware import get_request +from ..middleware.request_context_plugin import get_request from .model import ANONYMOUS_USER, LogoutReturnValue, User from .store.base_store import BaseStore @@ -119,17 +119,18 @@ def _delegate_stores_until_not_none(self, store_method_name: str, *args): @property def current_user(self) -> Optional[User]: try: - req = get_request() - # Fetch the existing user from the request if there is any - try: - user = req.state.user - if user: - return user - except (KeyError, AttributeError): - pass - # If there is no user, try to load it from the request and store it in the request - user = req.state.user = self.load_from_request(get_request()) - return user + r = get_request() + if r: + # Fetch the existing user from the request if there is any + try: + user = r.state.user + if user: + return user + except (KeyError, AttributeError): + pass + # If there is no user, try to load it from the request and store it in the request + user = r.state.user = self.load_from_request(r) + return user except HTTPException: return None except Exception: diff --git a/tdp_core/server/visyn_server.py b/tdp_core/server/visyn_server.py index 3a0239657..bb5c2f510 100644 --- a/tdp_core/server/visyn_server.py +++ b/tdp_core/server/visyn_server.py @@ -8,6 +8,7 @@ from fastapi.middleware.wsgi import WSGIMiddleware from pydantic import create_model from pydantic.utils import deep_update +from starlette_context.middleware import RawContextMiddleware from ..settings.constants import default_logging_dict @@ -60,7 +61,6 @@ def create_visyn_server( ) from ..middleware.exception_handler_middleware import ExceptionHandlerMiddleware - from ..middleware.request_context_middleware import RequestContextMiddleware # TODO: For some reason, a @app.exception_handler(Exception) is not called here. We use a middleware instead. 
app.add_middleware(ExceptionHandlerMiddleware) @@ -143,8 +143,10 @@ def create_visyn_server( for p in plugins: p.plugin.init_app(app) - # Add middleware to access Request "outside" - app.add_middleware(RequestContextMiddleware) + from ..middleware.request_context_plugin import RequestContextPlugin + + # Use starlette-context to store the current request globally, i.e. accessible via context['request'] + app.add_middleware(RawContextMiddleware, plugins=(RequestContextPlugin(),)) # TODO: Move up? app.add_api_route("/health", health) # type: ignore From b7ba9e15694250944bac7a22255d4f179a39b409 Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Fri, 23 Dec 2022 08:19:15 +0100 Subject: [PATCH 03/13] Increase anyio threads to 100 --- tdp_core/server/mainapp.py | 2 +- tdp_core/server/visyn_server.py | 8 ++++++++ tdp_core/settings/model.py | 7 ++++++- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/tdp_core/server/mainapp.py b/tdp_core/server/mainapp.py index c342d6473..a62d50242 100644 --- a/tdp_core/server/mainapp.py +++ b/tdp_core/server/mainapp.py @@ -140,7 +140,7 @@ def build_info(): # health check for docker-compose, kubernetes -def health(): +async def health(): return "ok" diff --git a/tdp_core/server/visyn_server.py b/tdp_core/server/visyn_server.py index bb5c2f510..629bdd5e5 100644 --- a/tdp_core/server/visyn_server.py +++ b/tdp_core/server/visyn_server.py @@ -4,6 +4,7 @@ import threading from typing import Any, Dict, Optional +import anyio from fastapi import FastAPI from fastapi.middleware.wsgi import WSGIMiddleware from pydantic import create_model @@ -152,4 +153,11 @@ def create_visyn_server( app.add_api_route("/health", health) # type: ignore app.add_api_route("/api/buildInfo.json", build_info) # type: ignore + @app.on_event("startup") + async def change_anyio_total_tokens(): + # FastAPI uses anyio threads to handle sync endpoint concurrently. + # This is a workaround to increase the number of threads to 100, as the default is only 40. 
+ limiter = anyio.to_thread.current_default_thread_limiter() + limiter.total_tokens = manager.settings.tdp_core.total_anyio_tokens + return app diff --git a/tdp_core/settings/model.py b/tdp_core/settings/model.py index 375dfb72f..ccf104743 100644 --- a/tdp_core/settings/model.py +++ b/tdp_core/settings/model.py @@ -51,6 +51,11 @@ class SecuritySettings(BaseModel): class TDPCoreSettings(BaseModel): + total_anyio_tokens: int = 100 + """ + The total number of threads to use for anyio. FastAPI uses these threads to run sync routes concurrently. + """ + disable: DisableSettings = DisableSettings() enabled_plugins: List[str] = [] @@ -73,7 +78,7 @@ class TDPCoreSettings(BaseModel): "name": "sam", "salt": "2338b858597b4937ad1c5db4b524f56d", "password": "814cbf874d3da7c01327b50c96bedf7db26357e0b4be25623242a33b33861651c3efd90d5c1a6410a646f356c73adf2de473611dee158672e8ee073767dc88f2", - "roles": ["sam", "admin"], + "roles": ["sam"], }, ] ) From f2b9fbde73e57df59f448da73eada5207cf34db9 Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Mon, 2 Jan 2023 14:11:54 +0100 Subject: [PATCH 04/13] Remove broken etag support --- tdp_core/dataset/dataset_api.py | 5 +-- tdp_core/dataset/graph/graph_api.py | 11 ++--- tdp_core/id_mapping/idtype_api.py | 3 -- .../close_web_sessions_middleware.py | 7 +++- .../exception_handler_middleware.py | 28 ++++++++++--- tdp_core/settings/constants.py | 1 + tdp_core/storage.py | 6 +-- tdp_core/utils.py | 40 +------------------ 8 files changed, 37 insertions(+), 64 deletions(-) diff --git a/tdp_core/dataset/dataset_api.py b/tdp_core/dataset/dataset_api.py index a28fd2c88..b69b3d3a2 100644 --- a/tdp_core/dataset/dataset_api.py +++ b/tdp_core/dataset/dataset_api.py @@ -5,7 +5,7 @@ from flask.wrappers import Response from .. 
import manager -from ..utils import etag, jsonify, to_json +from ..utils import jsonify, to_json from .dataset import add, get, iter, list_datasets, remove app = Flask(__name__) @@ -86,7 +86,6 @@ def filter_elem(elem): @app.route("/", methods=["GET", "POST"]) -@etag def _list_datasets(): if request.method == "GET": query = _to_query(request.values) @@ -111,7 +110,6 @@ def _list_datasets(): @app.route("/", methods=["PUT", "GET", "DELETE", "POST"]) -@etag def _get_dataset(dataset_id): if request.method == "PUT": return _update_dataset(dataset_id, request) @@ -128,7 +126,6 @@ def _get_dataset(dataset_id): @app.route("//desc") -@etag def _get_dataset_desc(dataset_id): d = get(dataset_id) if not d: diff --git a/tdp_core/dataset/graph/graph_api.py b/tdp_core/dataset/graph/graph_api.py index 8252acef2..6c1481971 100644 --- a/tdp_core/dataset/graph/graph_api.py +++ b/tdp_core/dataset/graph/graph_api.py @@ -1,7 +1,6 @@ from flask import abort, jsonify, request from ... import manager -from ...utils import etag def _to_desc(): @@ -87,13 +86,11 @@ def handleitem(datasetid, itemid): def add_graph_handler(app, dataset_getter): @app.route("/graph/") - @etag def list_graphs(datasetid): d = dataset_getter(datasetid, "graph") return jsonify(d.to_description()) @app.route("/graph//data") - @etag def get_graph_data(datasetid): d = dataset_getter(datasetid, "graph") formatter = resolve_formatter("graph", request.args.get("format", "json")) @@ -103,13 +100,13 @@ def get_graph_data(datasetid): app.add_url_rule( "/graph//node", "list_nodes", - etag(list_nodes), + list_nodes, methods=["GET", "POST", "DELETE"], ) app.add_url_rule( "/graph//node/", "handle_node", - etag(handle_node), + handle_node, methods=["GET", "PUT", "DELETE"], ) @@ -117,13 +114,13 @@ def get_graph_data(datasetid): app.add_url_rule( "/graph//edge", "list_edges", - etag(list_edges), + list_edges, methods=["GET", "POST", "DELETE"], ) app.add_url_rule( "/graph//edge/", "handle_edge", - etag(handle_edge), + handle_edge, 
methods=["GET", "PUT", "DELETE"], ) diff --git a/tdp_core/id_mapping/idtype_api.py b/tdp_core/id_mapping/idtype_api.py index 73d042528..8daa9171b 100644 --- a/tdp_core/id_mapping/idtype_api.py +++ b/tdp_core/id_mapping/idtype_api.py @@ -4,7 +4,6 @@ from .. import manager from ..dataset.dataset_def import to_idtype_description -from ..utils import etag app_idtype = Flask(__name__) @@ -12,7 +11,6 @@ @app_idtype.route("/") -@etag def _list_idtypes(): tmp = dict() # TODO: We probably don't want to have these idtypes as "all" idtypes @@ -27,7 +25,6 @@ def _list_idtypes(): @app_idtype.route("//") -@etag def _maps_to(idtype): target_id_types = manager.id_mapping.maps_to(idtype) return jsonify(target_id_types) diff --git a/tdp_core/middleware/close_web_sessions_middleware.py b/tdp_core/middleware/close_web_sessions_middleware.py index c5ac1daed..dc4b5c9e8 100644 --- a/tdp_core/middleware/close_web_sessions_middleware.py +++ b/tdp_core/middleware/close_web_sessions_middleware.py @@ -4,11 +4,16 @@ # Use basic ASGI middleware instead of BaseHTTPMiddleware as it is significantly faster: https://github.com/tiangolo/fastapi/issues/2696#issuecomment-768224643 +# Raw middlewares are actually quite complex: https://github.com/encode/starlette/blob/048643adc21e75b668567fc6bcdd3650b89044ea/starlette/middleware/errors.py#L147 class CloseWebSessionsMiddleware: def __init__(self, app: FastAPI): - self.app = app + self.app: FastAPI = app async def __call__(self, scope, receive, send): + if scope["type"] != "http": + await self.app(scope, receive, send) + return + await self.app(scope, receive, send) r = get_request() diff --git a/tdp_core/middleware/exception_handler_middleware.py b/tdp_core/middleware/exception_handler_middleware.py index 66ee38c48..70b10b0ec 100644 --- a/tdp_core/middleware/exception_handler_middleware.py +++ b/tdp_core/middleware/exception_handler_middleware.py @@ -2,22 +2,40 @@ from fastapi import FastAPI, HTTPException from fastapi.exception_handlers import 
http_exception_handler +from starlette.types import Message from ..server.utils import detail_from_exception -from .request_context_plugin import get_request # Use basic ASGI middleware instead of BaseHTTPMiddleware as it is significantly faster: https://github.com/tiangolo/fastapi/issues/2696#issuecomment-768224643 +# Raw middlewares are actually quite complex: https://github.com/encode/starlette/blob/048643adc21e75b668567fc6bcdd3650b89044ea/starlette/middleware/errors.py#L147 class ExceptionHandlerMiddleware: def __init__(self, app: FastAPI): - self.app = app + self.app: FastAPI = app async def __call__(self, scope, receive, send): - try: + if scope["type"] != "http": await self.app(scope, receive, send) + return + + response_started = False + + async def _send(message: Message) -> None: + nonlocal response_started, send + + if message["type"] == "http.response.start": + response_started = True + await send(message) + + try: + await self.app(scope, receive, _send) except Exception as e: logging.exception("An error occurred in FastAPI") - return await http_exception_handler( - get_request(), # type: ignore + response = await http_exception_handler( + None, # type: ignore e if isinstance(e, HTTPException) else HTTPException(status_code=500, detail=detail_from_exception(e)), ) + if not response_started: + await response(scope, receive, send) + + raise e diff --git a/tdp_core/settings/constants.py b/tdp_core/settings/constants.py index 0edff91a4..02341dfd8 100644 --- a/tdp_core/settings/constants.py +++ b/tdp_core/settings/constants.py @@ -1,5 +1,6 @@ default_logging_dict = { "version": 1, + # "disable_existing_loggers": False, "formatters": { "simple": { "format": "%(asctime)s %(levelname)s %(name)s: %(message)s", diff --git a/tdp_core/storage.py b/tdp_core/storage.py index e91a50a90..dd34758ff 100644 --- a/tdp_core/storage.py +++ b/tdp_core/storage.py @@ -7,7 +7,7 @@ import tdp_core.security as security from . 
import manager -from .utils import etag, fix_id, random_id +from .utils import fix_id, random_id c = manager.settings.tdp_core.mongo _log = logging.getLogger(__name__) @@ -16,7 +16,6 @@ @app.route("/namedsets/", methods=["GET", "POST"]) # type: ignore -@etag def list_namedset(): db = MongoClient(c.host, c.port)[c.db_namedsets] @@ -53,7 +52,6 @@ def list_namedset(): @app.route("/namedset/", methods=["GET", "DELETE", "PUT"]) # type: ignore -@etag def get_namedset(namedset_id): db = MongoClient(c.host, c.port)[c.db_namedsets] result = list(db.namedsets.find(dict(id=namedset_id), {"_id": 0})) @@ -110,7 +108,6 @@ def _generate_id(): @app.route("/attachment/", methods=["POST"]) -@etag def post_attachment(): """ simple attachment management @@ -129,7 +126,6 @@ def post_attachment(): @app.route("/attachment/", methods=["GET", "DELETE", "PUT"]) # type: ignore -@etag def get_attachment(attachment_id): db = MongoClient(c.host, c.port)[c.db_namedsets] result = list(db.attachments.find(dict(id=attachment_id), {"_id": 0})) diff --git a/tdp_core/utils.py b/tdp_core/utils.py index acbaf22f5..cad2ff779 100644 --- a/tdp_core/utils.py +++ b/tdp_core/utils.py @@ -3,7 +3,7 @@ from builtins import range from typing import Union -from flask import abort, make_response, request +from flask import abort, make_response from flask.wrappers import Response from . 
import manager @@ -84,33 +84,6 @@ def no_cache(f): return cache_control("private", "no-cache", "no-store", "max-age=0")(f) -def etag(f): - """Add entity tag (etag) handling to the decorated route.""" - import functools - - @functools.wraps(f) - def wrapped(*args, **kwargs): - if request.method not in ["GET", "HEAD"]: - # etags only make sense for request that are cacheable, so only - # GET and HEAD requests are allowed - return f(*args, **kwargs) - - # invoke the wrapped function and generate a response object from - # its result - rv = f(*args, **kwargs) - rv = make_response(rv) - - # if the response is not a code 200 OK then we let it through - # unchanged - if rv.status_code != 200 or rv.direct_passthrough or not rv.implicit_sequence_conversion: - return rv - - rv.add_etag() - return rv.make_conditional(request) - - return wrapped - - def fix_id(id): """ fixes the id such that is it a resource identifier @@ -228,14 +201,3 @@ def jsonify(obj, *args, **kwargs): :return: """ return Response(to_json(obj, *args, **kwargs), mimetype="application/json; charset=utf-8") - - -def glob_recursivly(path, match): - import fnmatch - import os - - for dirpath, dirnames, files in os.walk(path): - if match is None: - return None - for f in fnmatch.filter(files, match): - yield os.path.join(dirpath, f) From daf9fa7daaf639a932df14b0c65761b60ac2067d Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Mon, 2 Jan 2023 15:32:53 +0100 Subject: [PATCH 05/13] Improved logging --- .vscode/launch.json | 15 +++++++++++++ tdp_core/dbmigration/manager.py | 5 ++--- tdp_core/id_mapping/manager.py | 2 +- .../exception_handler_middleware.py | 2 +- tdp_core/plugin/parser.py | 6 +++--- tdp_core/security/manager.py | 8 +++---- tdp_core/security/store/dummy_store.py | 5 ----- tdp_core/server/utils.py | 13 ++++++------ tdp_core/server/visyn_server.py | 18 +++++++++------- tdp_core/settings/constants.py | 21 +++++++++++-------- tdp_core/settings/utils.py | 2 +- 11 files changed, 56 insertions(+), 41 
deletions(-) create mode 100644 .vscode/launch.json diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 000000000..5a2ce1b25 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,15 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Module", + "type": "python", + "request": "launch", + "module": "tdp_core", + "justMyCode": false + } + ] +} \ No newline at end of file diff --git a/tdp_core/dbmigration/manager.py b/tdp_core/dbmigration/manager.py index 454af2e3f..a4f473b31 100644 --- a/tdp_core/dbmigration/manager.py +++ b/tdp_core/dbmigration/manager.py @@ -156,12 +156,11 @@ def __init__(self): self._migrations: Dict[str, DBMigration] = dict() def init_app(self, app: FastAPI, plugins: List[AExtensionDesc] = []): - _log.info("Initializing DBMigrationManager") - + _log.info(f"Initializing DBMigrationManager with {', '.join([p.id for p in plugins]) or 'no plugins'}") auto_upgrade_default = manager.settings.tdp_core.migrations.autoUpgrade for p in plugins: - _log.info("DBMigration found: %s", p.id) + _log.info(f"Database migration found: {p.id}") # TODO: The AExtensionDesc doesn't have any typing information, so we need to cast it to Any here p: Any = p diff --git a/tdp_core/id_mapping/manager.py b/tdp_core/id_mapping/manager.py index e3d0f573a..d0d87ef68 100644 --- a/tdp_core/id_mapping/manager.py +++ b/tdp_core/id_mapping/manager.py @@ -198,9 +198,9 @@ def search(self, from_idtype, to_idtype, query, max_results=None): def create_id_mapping_manager() -> MappingManager: - _log.info("Creating mapping_manager") # Load mapping providers providers = [] for plugin in manager.registry.list("mapping_provider"): providers = providers + list(plugin.load().factory()) + _log.info(f"Initializing MappingManager with {len(providers)} 
provider(s)") return MappingManager(providers) diff --git a/tdp_core/middleware/exception_handler_middleware.py b/tdp_core/middleware/exception_handler_middleware.py index 70b10b0ec..aa1aff7ec 100644 --- a/tdp_core/middleware/exception_handler_middleware.py +++ b/tdp_core/middleware/exception_handler_middleware.py @@ -30,7 +30,7 @@ async def _send(message: Message) -> None: try: await self.app(scope, receive, _send) except Exception as e: - logging.exception("An error occurred in FastAPI") + logging.exception(repr(e)) response = await http_exception_handler( None, # type: ignore e if isinstance(e, HTTPException) else HTTPException(status_code=500, detail=detail_from_exception(e)), diff --git a/tdp_core/plugin/parser.py b/tdp_core/plugin/parser.py index f6b4b77d7..add01d918 100644 --- a/tdp_core/plugin/parser.py +++ b/tdp_core/plugin/parser.py @@ -84,7 +84,7 @@ def load_all_plugins() -> List[EntryPointPlugin]: plugins: List[EntryPointPlugin] = [p for p in _find_entry_point_plugins() if not is_disabled_plugin(p)] plugins.sort(key=lambda p: p.id) - _log.info(f"Discovered {len(plugins)} plugins: {', '.join([d.id for d in plugins])}") + _log.info(f"Discovered {len(plugins)} plugin(s): {', '.join([d.id for d in plugins])}") return plugins @@ -95,7 +95,7 @@ def get_extensions_from_plugins(plugins: List[EntryPointPlugin]) -> List: reg = RegHelper(plugin) plugin.plugin.register(reg) ext = [r for r in reg if not is_disabled_extension(r, "python", plugin)] - logging.info(f"plugin {plugin.id} registered {len(ext)} extension(s)") + _log.info(f"Plugin {plugin.id} registered {len(ext)} extension(s)") plugin.extensions = ext server_extensions.extend(ext) @@ -111,7 +111,7 @@ def get_config_from_plugins(plugins: List[EntryPointPlugin]) -> Tuple[List[Dict[ for plugin in plugins: plugin_settings_model = plugin.plugin.setting_class if plugin_settings_model: - logging.info(f"Plugin {plugin.id} has a settings model") + _log.info(f"Plugin {plugin.id} has a settings model") # Load the 
class of the config and wrap it in a tuple like (, ...), # such that pydantic can use it as type-hint in the create_model class. # Otherwise, it would except to be the default value... diff --git a/tdp_core/security/manager.py b/tdp_core/security/manager.py index 5af45a013..40515c2b1 100644 --- a/tdp_core/security/manager.py +++ b/tdp_core/security/manager.py @@ -232,13 +232,13 @@ def create_security_manager(): """ :return: the security manager """ - _log.info("Creating security_manager") - user_stores = list(filter(None, [p.load().factory() for p in manager.registry.list("user_stores")])) if len(user_stores) == 0 or manager.settings.tdp_core.alwaysAppendDummyStore: - from .store import dummy_store + from .store.dummy_store import DummyStore + + user_stores.append(DummyStore()) - user_stores.append(dummy_store.create()) + _log.info(f"Initializing SecurityManager with {', '.join([s.__class__.__name__ for s in user_stores]) or 'no user stores'}") return SecurityManager(user_stores=user_stores) diff --git a/tdp_core/security/store/dummy_store.py b/tdp_core/security/store/dummy_store.py index 00ffeb80c..808e5c04b 100644 --- a/tdp_core/security/store/dummy_store.py +++ b/tdp_core/security/store/dummy_store.py @@ -50,8 +50,3 @@ def login(self, username, extra_fields={}): def logout(self, user): pass - - -def create(): - _log.info("Creating dummy store") - return DummyStore() diff --git a/tdp_core/server/utils.py b/tdp_core/server/utils.py index 7285356b1..04073c59b 100644 --- a/tdp_core/server/utils.py +++ b/tdp_core/server/utils.py @@ -33,7 +33,7 @@ def init_legacy_app(app: Flask): @app.errorhandler(Exception) # type: ignore async def handle_exception(e): """Handles Flask exceptions by returning the same JSON response as FastAPI#HTTPException would.""" - _log.exception("An error occurred in Flask") + _log.exception(repr(e)) # Extract status information if a Flask#HTTPException is given, otherwise return 500 with exception information status_code = e.code if 
isinstance(e, HTTPException) else 500 detail = detail_from_exception(e) @@ -56,12 +56,13 @@ def load_after_server_started_hooks(): after_server_started_hooks = [p.load().factory() for p in manager.registry.list("after_server_started")] - _log.info(f"Found {len(after_server_started_hooks)} `after_server_started` extension points to run") + if after_server_started_hooks: + _log.info(f"Found {len(after_server_started_hooks)} after_server_started extension(s) to run") - for hook in after_server_started_hooks: - hook() + for hook in after_server_started_hooks: + hook() - _log.info("Elapsed time for server startup hooks: %d seconds", time.time() - start) + _log.info("Elapsed time for server startup hooks: %d seconds", time.time() - start) def detail_from_exception(e: Exception) -> Optional[str]: @@ -75,4 +76,4 @@ def detail_from_exception(e: Exception) -> Optional[str]: if isinstance(e, HTTPException): return e.description # Fallback to the string representation of the exception - return str(e) + return repr(e) diff --git a/tdp_core/server/visyn_server.py b/tdp_core/server/visyn_server.py index 629bdd5e5..0b3fef54a 100644 --- a/tdp_core/server/visyn_server.py +++ b/tdp_core/server/visyn_server.py @@ -36,8 +36,15 @@ def create_visyn_server( workspace_config = workspace_config if isinstance(workspace_config, dict) else load_workspace_config() manager.settings = GlobalSettings(**workspace_config) logging.config.dictConfig(manager.settings.tdp_core.logging) + + # Filter out the metrics endpoint from the access log + class EndpointFilter(logging.Filter): + def filter(self, record: logging.LogRecord) -> bool: + return "GET /metrics" not in record.getMessage() + + logging.getLogger("uvicorn.access").addFilter(EndpointFilter()) + _log = logging.getLogger(__name__) - _log.info("Workspace settings successfully loaded") # Load the initial plugins from ..plugin.parser import get_config_from_plugins, load_all_plugins @@ -48,7 +55,6 @@ def create_visyn_server( visyn_server_settings = 
create_model("VisynServerSettings", __base__=GlobalSettings, **plugin_settings_models) # Patch the global settings by instantiating the new settings model with the global config, all config.json(s), and pydantic models manager.settings = visyn_server_settings(**deep_update(*plugin_config_files, workspace_config)) - _log.info("All settings successfully loaded") app = FastAPI( debug=manager.settings.is_development_mode, @@ -75,8 +81,6 @@ def create_visyn_server( app.state.registry = manager.registry = Registry() manager.registry.init_app(app, plugins) - _log.info("Plugin registry successfully initialized") - from ..dbmanager import DBManager app.state.db = manager.db = DBManager() @@ -111,10 +115,9 @@ def create_visyn_server( from .utils import init_legacy_app, load_after_server_started_hooks namespace_plugins = manager.registry.list("namespace") - _log.info(f"Registering {len(namespace_plugins)} legacy namespaces via WSGIMiddleware") + _log.info(f"Registering {len(namespace_plugins)} legacy namespace(s) via WSGIMiddleware") for p in namespace_plugins: namespace = p.namespace # type: ignore - _log.info(f"Registering legacy namespace: {namespace}") sub_app = p.load().factory() init_legacy_app(sub_app) @@ -123,10 +126,9 @@ def create_visyn_server( # Load all FastAPI apis router_plugins = manager.registry.list("fastapi_router") - _log.info(f"Registering {len(router_plugins)} API-routers") + _log.info(f"Registering {len(router_plugins)} FastAPI router(s)") # Load all namespace plugins as WSGIMiddleware plugins for p in router_plugins: - _log.info(f"Registering router: {p.id}") app.include_router(p.load().factory()) # load `after_server_started` extension points which are run immediately after server started, diff --git a/tdp_core/settings/constants.py b/tdp_core/settings/constants.py index 02341dfd8..c364fcb54 100644 --- a/tdp_core/settings/constants.py +++ b/tdp_core/settings/constants.py @@ -1,19 +1,22 @@ default_logging_dict = { "version": 1, - # 
"disable_existing_loggers": False, + "disable_existing_loggers": False, "formatters": { - "simple": { - "format": "%(asctime)s %(levelname)s %(name)s: %(message)s", - "datefmt": "%H:%M:%S", + "default": { + "()": "uvicorn.logging.DefaultFormatter", + "format": "%(levelprefix)s %(asctime)s | %(name)30s | %(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", }, - "line": {"format": "%(asctime)s %(levelname)s %(name)s(%(pathname)s:%(lineno)s): %(message)s"}, + }, + "loggers": { + "uvicorn": {"handlers": ["default"]}, }, "handlers": { - "console": { + "default": { "class": "logging.StreamHandler", - "formatter": "simple", - "stream": "ext://sys.stdout", + "formatter": "default", + "stream": "ext://sys.stderr", } }, - "root": {"level": "INFO", "handlers": ["console"]}, + "root": {"level": "INFO", "handlers": ["default"]}, } diff --git a/tdp_core/settings/utils.py b/tdp_core/settings/utils.py index ca8af878e..c8b111510 100644 --- a/tdp_core/settings/utils.py +++ b/tdp_core/settings/utils.py @@ -18,7 +18,7 @@ def load_workspace_config() -> Dict[str, Any]: _log.info(f"Loading workspace config.json from {global_}") return load_config_file(global_) else: - _log.info(f"No workspace config.json found at {global_}, using empty dict as default") + _log.info(f"No {global_} found, using empty dict") return {} From 91c1f77c3be0254dd00c24fac0bfc545f7fc9ffd Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Tue, 3 Jan 2023 12:07:11 +0100 Subject: [PATCH 06/13] Switch to a2wsgi --- requirements.txt | 1 + tdp_core/server/visyn_server.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1697da90e..59753c832 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +a2wsgi==1.6.0 alembic==1.9.0 cachetools==5.2.0 fastapi[all]==0.88.0 diff --git a/tdp_core/server/visyn_server.py b/tdp_core/server/visyn_server.py index 0b3fef54a..350c493e1 100644 --- a/tdp_core/server/visyn_server.py +++ b/tdp_core/server/visyn_server.py 
@@ -5,8 +5,8 @@ from typing import Any, Dict, Optional import anyio +from a2wsgi import WSGIMiddleware from fastapi import FastAPI -from fastapi.middleware.wsgi import WSGIMiddleware from pydantic import create_model from pydantic.utils import deep_update from starlette_context.middleware import RawContextMiddleware From 916df17dbfef5a410c3ed567b03d67480d5d476a Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Tue, 3 Jan 2023 15:12:01 +0100 Subject: [PATCH 07/13] Remove a2wsgi again --- requirements.txt | 2 +- tdp_core/server/visyn_server.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 59753c832..bc6bac694 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -a2wsgi==1.6.0 +# a2wsgi==1.6.0 # This WSIGMiddleware is not compatible with starlette_context alembic==1.9.0 cachetools==5.2.0 fastapi[all]==0.88.0 diff --git a/tdp_core/server/visyn_server.py b/tdp_core/server/visyn_server.py index 350c493e1..0b3fef54a 100644 --- a/tdp_core/server/visyn_server.py +++ b/tdp_core/server/visyn_server.py @@ -5,8 +5,8 @@ from typing import Any, Dict, Optional import anyio -from a2wsgi import WSGIMiddleware from fastapi import FastAPI +from fastapi.middleware.wsgi import WSGIMiddleware from pydantic import create_model from pydantic.utils import deep_update from starlette_context.middleware import RawContextMiddleware From cb2ed6909024e973282c2b1b41ae5c1baa4cef49 Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Wed, 4 Jan 2023 11:20:10 +0100 Subject: [PATCH 08/13] Remove greenifier extension --- tdp_core/dbmanager.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tdp_core/dbmanager.py b/tdp_core/dbmanager.py index 6e2a9a30a..ad5aba7b7 100644 --- a/tdp_core/dbmanager.py +++ b/tdp_core/dbmanager.py @@ -15,8 +15,6 @@ class DBManager(object): def __init__(self): - self._initialized = False - self.connectors: Dict[str, DBConnector] = {} self._plugins = {} self._engines = dict() @@ 
-47,11 +45,6 @@ def init_app(self, app: FastAPI): self.connectors[p.id] = connector def _load_engine(self, item): - if not self._initialized: - self._initialized = True - for p in manager.registry.list("greenifier"): - _log.info("run greenifier: %s", p.id) - p.load().factory() if item in self._engines: return self._engines[item] From f1bf427357cfeb82c54cd3867b0dd4216c831184 Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Mon, 9 Jan 2023 07:53:52 +0100 Subject: [PATCH 09/13] Bump FastAPI to 0.89.0 --- requirements.txt | 2 +- tdp_core/security/jwt_router.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index bc6bac694..56367c5a9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ # a2wsgi==1.6.0 # This WSIGMiddleware is not compatible with starlette_context alembic==1.9.0 cachetools==5.2.0 -fastapi[all]==0.88.0 +fastapi[all]==0.89.0 flask-swagger-ui==3.36.0 Flask[async]==2.2.2 json-cfg==0.4.2 diff --git a/tdp_core/security/jwt_router.py b/tdp_core/security/jwt_router.py index 9c4c456e0..e379b2a56 100644 --- a/tdp_core/security/jwt_router.py +++ b/tdp_core/security/jwt_router.py @@ -33,8 +33,8 @@ def get_login(): """ -@jwt_router.post("/login", response_model=Token) -def post_login(form_data: OAuth2PasswordRequestForm = Depends()): +@jwt_router.post("/login") +def post_login(form_data: OAuth2PasswordRequestForm = Depends()) -> Token: user = manager.security.login(form_data.username, {"password": form_data.password}) if not user: raise HTTPException( @@ -51,7 +51,7 @@ def post_login(form_data: OAuth2PasswordRequestForm = Depends()): # And as cookie (for easier frontend communication) add_access_token_to_response(response, access_token) - return response + return response # type: ignore @jwt_router.post("/logout") From 4a62e67057beba00ce114c027f7e8b267d2dfeba Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Mon, 9 Jan 2023 20:59:27 +0100 Subject: [PATCH 10/13] Add pytest in 
settings.json --- .vscode/settings.json | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..16cefc58d --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "tdp_core" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} \ No newline at end of file From 190d11473b8d1aa4c5b3af2c8d98935c27fae1f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20P=C3=BChringer?= <51900829+puehringer@users.noreply.github.com> Date: Wed, 11 Jan 2023 16:08:00 +0100 Subject: [PATCH 11/13] Set SQLAlchemy version to >= 1.4.40 and <= 1.4.46 to allow SQLModel > 0.0.6 (#819) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 56367c5a9..ded67e904 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,7 @@ python-memcached==1.59 python-multipart==0.0.5 rdkit==2022.3.5 requests==2.28.1 -SQLAlchemy==1.4.45 +SQLAlchemy>=1.4.40,<=1.4.46 starlette-context==0.3.5 urllib3==1.26.9 uvicorn[standard]==0.20.0 From 60074b0a78641aa83f3fe15a2077cdd637280b40 Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Wed, 11 Jan 2023 17:20:21 +0100 Subject: [PATCH 12/13] Loosen Flask and marshmallow-sqlalchemy versions for superset --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index ded67e904..89c5937b2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,9 +3,9 @@ alembic==1.9.0 cachetools==5.2.0 fastapi[all]==0.89.0 flask-swagger-ui==3.36.0 -Flask[async]==2.2.2 +Flask[async]>=2.1.0,<=2.2.2 json-cfg==0.4.2 -marshmallow-sqlalchemy==0.28.1 +marshmallow-sqlalchemy>=0.26.0,<=0.28.1 marshmallow==3.19.0 openpyxl==3.0.9 Pillow==9.3.0 From 1bf74d3053d2005ed77b74c8fc85e3c420a9ea59 Mon Sep 17 00:00:00 2001 From: Michael Puehringer Date: Wed, 11 
Jan 2023 22:12:40 +0100 Subject: [PATCH 13/13] Removed flake8, isort, pep8 in favor of ruff --- Makefile | 26 ++++++--- .../StructureImageRenderer.d.ts.map | 2 +- .../structureImage/StructureImageRenderer.js | 3 +- .../StructureImageRenderer.js.map | 2 +- requirements_dev.txt | 5 +- setup.py | 2 +- .../structureImage/StructureImageRenderer.ts | 3 +- tdp_core/dataset/dataset.py | 14 ++--- tdp_core/dataset/dataset_api.py | 21 ++++--- tdp_core/dataset/dataset_def.py | 11 ++-- tdp_core/dataset/graph/graph.py | 27 +++++---- tdp_core/dataset/graph/graph_api.py | 2 +- tdp_core/db.py | 37 ++++++------- tdp_core/dbmanager.py | 18 +++--- tdp_core/dbmigration/manager.py | 28 ++++++---- tdp_core/dbview.py | 32 +++++------ tdp_core/encoder/bytes_to_string_encoder.py | 2 +- tdp_core/encoder/json_encoder.py | 3 +- tdp_core/encoder/set_encoder.py | 2 +- tdp_core/formatter.py | 4 +- tdp_core/graph.py | 52 +++++++++--------- tdp_core/id_mapping/idtype_api.py | 2 +- tdp_core/id_mapping/manager.py | 16 +++--- tdp_core/mapping_table.py | 4 +- .../close_web_sessions_middleware.py | 10 ++-- tdp_core/middleware/request_context_plugin.py | 4 +- tdp_core/mol_img/img_api.py | 19 ++++--- tdp_core/mol_img/models.py | 8 +-- tdp_core/mol_img/util/molecule.py | 4 +- tdp_core/plugin/model.py | 14 ++--- tdp_core/plugin/parser.py | 19 +++---- tdp_core/plugin/registry.py | 27 ++++----- tdp_core/plugin/router.py | 2 +- tdp_core/proxy.py | 2 +- tdp_core/security/__init__.py | 16 +++++- tdp_core/security/manager.py | 44 +++++++-------- tdp_core/security/model.py | 10 ++-- tdp_core/security/permissions.py | 15 ++--- tdp_core/security/store/alb_security_store.py | 5 +- tdp_core/security/store/base_store.py | 21 ++++--- tdp_core/security/store/dummy_store.py | 4 +- tdp_core/security/store/no_security_store.py | 3 +- tdp_core/server/cmd.py | 4 +- tdp_core/server/mainapp.py | 5 +- tdp_core/server/utils.py | 3 +- tdp_core/server/visyn_server.py | 6 +- tdp_core/settings/model.py | 24 ++++---- 
tdp_core/settings/utils.py | 6 +- tdp_core/sql.py | 7 +-- tdp_core/sql_filter.py | 8 +-- tdp_core/storage.py | 48 ++++++++-------- tdp_core/swagger.py | 55 +++++++++---------- tdp_core/tests/fixtures/app.py | 12 ++-- tdp_core/tests/test_custom_encoders.py | 6 +- tdp_core/tests/test_mapper.py | 24 ++++---- tdp_core/tests/test_rdkit_img.py | 8 +-- tdp_core/tests/test_security_login.py | 4 +- tdp_core/utils.py | 18 +++--- tdp_core/xlsx.py | 12 ++-- 59 files changed, 390 insertions(+), 405 deletions(-) diff --git a/Makefile b/Makefile index aaa203780..699870ca3 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,9 @@ .DEFAULT_GOAL := help pkg_src = tdp_core -flake8 = flake8 $(pkg_src) setup.py -isort = isort $(pkg_src) setup.py black = black --line-length 140 $(pkg_src) setup.py -pyright = pyright $(pkg_src) +pyright = pyright $(pkg_src) setup.py +ruff = ruff $(pkg_src) setup.py --line-length 140 --select E,W,F,N,I,C,B,UP,PT,SIM,RUF --ignore E501,C901,B008 .PHONY: start ## Start the development server start: @@ -18,17 +17,16 @@ ci: check-format lint test .PHONY: format ## Auto-format the source code format: - $(isort) + $(ruff) --fix $(black) .PHONY: check-format ## Check the source code format without changes check-format: - $(isort) --check-only $(black) --check .PHONY: lint ## Run flake8 and pyright lint: - $(flake8) + $(ruff) --format=github $(pyright) .PHONY: test ## Run tests @@ -37,7 +35,7 @@ test: .PHONEY: documentation ## Generate docs documentation: - mkdocs build + echo "TODO" .PHONY: install ## Install the requirements install: @@ -47,6 +45,20 @@ install: develop: pip install -e .[develop] +.PHONY: env_encrypt ## Encrypts the current .//.env +env_encrypt: + openssl aes-256-cbc -pbkdf2 -in ./$(pkg_src)/.env -out ./$(pkg_src)/.env.enc + +.PHONY: env_decrypt ## Decrypts the .//.env.enc +env_decrypt: + @if [ -z "${ENV_PASSWORD}" ]; then \ + echo "No ENV_PASSWORD set, prompting for password..."; \ + openssl aes-256-cbc -pbkdf2 -d -in ./$(pkg_src)/.env.enc -out 
./$(pkg_src)/.env; \ + else \ + echo "ENV_PASSWORD set, using it..."; \ + openssl aes-256-cbc -pbkdf2 -d -in ./$(pkg_src)/.env.enc -out ./$(pkg_src)/.env -pass env:ENV_PASSWORD; \ + fi + .PHONY: build ## Build a wheel build: python setup.py sdist bdist_wheel --dist-dir dist_python diff --git a/dist/lineup/structureImage/StructureImageRenderer.d.ts.map b/dist/lineup/structureImage/StructureImageRenderer.d.ts.map index 6323ce939..42a535c73 100644 --- a/dist/lineup/structureImage/StructureImageRenderer.d.ts.map +++ b/dist/lineup/structureImage/StructureImageRenderer.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"StructureImageRenderer.d.ts","sourceRoot":"","sources":["../../../src/lineup/structureImage/StructureImageRenderer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,oBAAoB,EACpB,WAAW,EACX,aAAa,EAEb,cAAc,EACd,kBAAkB,EAGlB,gBAAgB,EACjB,MAAM,UAAU,CAAC;AAElB,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAwD9D,qBAAa,sBAAuB,YAAW,oBAAoB;IACjE,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAmG;IAEzH,SAAS,CAAC,GAAG,EAAE,oBAAoB,EAAE,IAAI,EAAE,WAAW,GAAG,OAAO;IAIhE,MAAM,CAAC,GAAG,EAAE,oBAAoB,GAAG,aAAa;IA6BhD,WAAW,CAAC,GAAG,EAAE,oBAAoB,EAAE,OAAO,EAAE,cAAc,GAAG,kBAAkB;IAanF,aAAa,IAAI,gBAAgB;CAOlC"} \ No newline at end of file +{"version":3,"file":"StructureImageRenderer.d.ts","sourceRoot":"","sources":["../../../src/lineup/structureImage/StructureImageRenderer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,oBAAoB,EACpB,WAAW,EACX,aAAa,EAEb,cAAc,EACd,kBAAkB,EAGlB,gBAAgB,EACjB,MAAM,UAAU,CAAC;AAElB,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAwD9D,qBAAa,sBAAuB,YAAW,oBAAoB;IACjE,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAmG;IAEzH,SAAS,CAAC,GAAG,EAAE,oBAAoB,EAAE,IAAI,EAAE,WAAW,GAAG,OAAO;IAIhE,MAAM,CAAC,GAAG,EAAE,oBAAoB,GAAG,aAAa;IA4BhD,WAAW,CAAC,GAAG,EAAE,oBAAoB,EAAE,OAAO,EAAE,cAAc,GAAG,kBAAkB;IAanF,aAAa,IAAI,gBAAgB;CAOlC"} \ No newline at end of file diff --git a/dist/lineup/structureImage/StructureImageRenderer.js b/dist/lineup/structureImage/StructureImageRenderer.js index 83f0fb861..f8b2ce4b4 100644 --- 
a/dist/lineup/structureImage/StructureImageRenderer.js +++ b/dist/lineup/structureImage/StructureImageRenderer.js @@ -2,7 +2,7 @@ import { ERenderMode, renderMissingDOM, } from 'lineupjs'; import { abortAble } from 'lineupengine'; import { StructureImageColumn } from './StructureImageColumn'; import { I18nextManager } from '../../i18n'; -const template = ''; +const template = '
'; function getImageURL(structure, substructure = null, align = null) { return `/api/rdkit/?structure=${encodeURIComponent(structure)}${substructure ? `&substructure=${encodeURIComponent(substructure)}` : ''}${align ? `&align=${encodeURIComponent(align)}` : ''}`; } @@ -71,7 +71,6 @@ export class StructureImageRenderer { } n.style.backgroundImage = `url('${getImageURL(value, col.getFilter()?.filter, col.getAlign())}')`; n.title = value; - n.href = `https://pubchem.ncbi.nlm.nih.gov/#query=${value}`; }); } return null; diff --git a/dist/lineup/structureImage/StructureImageRenderer.js.map b/dist/lineup/structureImage/StructureImageRenderer.js.map index 53846e82d..b23166b50 100644 --- a/dist/lineup/structureImage/StructureImageRenderer.js.map +++ b/dist/lineup/structureImage/StructureImageRenderer.js.map @@ -1 +1 @@ -{"version":3,"file":"StructureImageRenderer.js","sourceRoot":"","sources":["../../../src/lineup/structureImage/StructureImageRenderer.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,WAAW,EAMX,gBAAgB,GAEjB,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAE5C,MAAM,QAAQ,GAAG,qIAAqI,CAAC;AAEvJ,SAAS,WAAW,CAAC,SAAiB,EAAE,eAA8B,IAAI,EAAE,QAAuB,IAAI;IACrG,OAAO,yBAAyB,kBAAkB,CAAC,SAAS,CAAC,GAAG,YAAY,CAAC,CAAC,CAAC,iBAAiB,kBAAkB,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,GACrI,KAAK,CAAC,CAAC,CAAC,UAAU,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAClD,EAAE,CAAC;AACL,CAAC;AAED,KAAK,UAAU,UAAU,CAAC,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAgD;IAC3F,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;QAChC,OAAO,EAAE;YACP,cAAc,EAAE,kBAAkB;SACnC;QACD,gDAAgD;QAChD,MAAM;QACN,QAAQ,EAAE,QAAQ;QAClB,GAAG,CAAC,IAAI;YACN,CAAC,CAAC;gBACE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;aAC3B;YACH,CAAC,CAAC,EAAE,CAAC;KACR,CAAC,CAAC;IACH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;QAChB,MAAM,KAAK,CAAC,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,OAAO,IAAI,QAAQ,CAAC,UAAU,CAAC,CAAC;KACxF;IACD,OAAO,QAAQ,CAAC,IAAI,
EAAE,CAAC;AACzB,CAAC;AAED,KAAK,UAAU,gBAAgB,CAAC,UAAoB;IAClD,8BAA8B;IAC9B,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;QACzB,OAAO,UAAU,CAAC,EAAE,GAAG,EAAE,iBAAiB,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;KACjF;IAED,aAAa;IACb,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3B,MAAM,SAAS,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,MAAM,KAAK,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QACpE,OAAO,UAAU,CAAC,EAAE,GAAG,EAAE,oCAAoC,kBAAkB,CAAC,KAAK,CAAC,cAAc,kBAAkB,CAAC,SAAS,CAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;KACvJ;IAED,2BAA2B;IAC3B,OAAO,UAAU,CAAC,EAAE,GAAG,EAAE,yBAAyB,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;AAC1G,CAAC;AAED,SAAS,aAAa,CAAC,GAAW;IAChC,OAAO,6BAA6B,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;AAClD,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW;IACrC,OAAO,QAAQ,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC;AACxC,CAAC;AAED,MAAM,OAAO,sBAAsB;IAAnC;QACW,UAAK,GAAW,cAAc,CAAC,WAAW,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,0DAA0D,CAAC,CAAC;IAuD3H,CAAC;IArDC,SAAS,CAAC,GAAyB,EAAE,IAAiB;QACpD,OAAO,GAAG,YAAY,oBAAoB,IAAI,CAAC,IAAI,KAAK,WAAW,CAAC,IAAI,IAAI,IAAI,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC;IAC1G,CAAC;IAED,MAAM,CAAC,GAAyB;QAC9B,OAAO;YACL,QAAQ;YACR,MAAM,EAAE,CAAC,CAAkB,EAAE,CAAW,EAAE,EAAE;gBAC1C,IAAI,CAAC,gBAAgB,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE;oBAChC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE;wBACnB,CAAC,CAAC,KAAK,CAAC,eAAe,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;wBAC5D,OAAO,IAAI,CAAC;qBACb;oBACD,MAAM,KAAK,GAAG,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;oBAC/B,iCAAiC;oBACjC,OAAO,SAAS,CACd,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;wBACtB,MAAM,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,GAAG,CAAC,CAAC;oBAC/C,CAAC,CAAC,CACH,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE;wBACf,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;4BAC7B,OAAO;yBACR;wBACD,CAAC,CAAC,KAAK,CAAC,eAAe,GAAG,QAAQ,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,EAAE,EAAE,MAAM,EAAE,GAAG,CAAC,QAAQ,EAAE,CAAC,IAAI,CAAC;wBAClG,CAAC,CAAC,KAAK,GAAG,KAAK,CAAC;wBAChB,CAAC,CAAC,IAAI,GAAG,2C
AA2C,KAAK,EAAE,CAAC;oBAC9D,CAAC,CAAC,CAAC;iBACJ;gBACD,OAAO,IAAI,CAAC;YACd,CAAC;SACF,CAAC;IACJ,CAAC;IAED,WAAW,CAAC,GAAyB,EAAE,OAAuB;QAC5D,OAAO;YACL,QAAQ;YACR,MAAM,EAAE,CAAC,CAAmB,EAAE,KAAoB,EAAE,EAAE;gBACpD,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC,GAAG,EAAE,KAAK,EAAE,6BAA6B,EAAE,CAAC,IAAI,EAAE,EAAE;oBAC1E,OAAO,SAAS,CAAC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,GAAQ,EAAE,EAAE;wBACrG,CAAC,CAAC,KAAK,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC,CAAC,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;oBAC/D,CAAC,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED,aAAa;QACX,cAAc;QACd,OAAO;YACL,QAAQ,EAAE,aAAa;YACvB,MAAM,EAAE,GAAG,EAAE,GAAE,CAAC;SACjB,CAAC;IACJ,CAAC;CACF"} \ No newline at end of file +{"version":3,"file":"StructureImageRenderer.js","sourceRoot":"","sources":["../../../src/lineup/structureImage/StructureImageRenderer.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,WAAW,EAMX,gBAAgB,GAEjB,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAE5C,MAAM,QAAQ,GAAG,0GAA0G,CAAC;AAE5H,SAAS,WAAW,CAAC,SAAiB,EAAE,eAA8B,IAAI,EAAE,QAAuB,IAAI;IACrG,OAAO,yBAAyB,kBAAkB,CAAC,SAAS,CAAC,GAAG,YAAY,CAAC,CAAC,CAAC,iBAAiB,kBAAkB,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,GACrI,KAAK,CAAC,CAAC,CAAC,UAAU,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,EAClD,EAAE,CAAC;AACL,CAAC;AAED,KAAK,UAAU,UAAU,CAAC,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAgD;IAC3F,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;QAChC,OAAO,EAAE;YACP,cAAc,EAAE,kBAAkB;SACnC;QACD,gDAAgD;QAChD,MAAM;QACN,QAAQ,EAAE,QAAQ;QAClB,GAAG,CAAC,IAAI;YACN,CAAC,CAAC;gBACE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;aAC3B;YACH,CAAC,CAAC,EAAE,CAAC;KACR,CAAC,CAAC;IACH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE;QAChB,MAAM,KAAK,CAAC,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,EAAE,OAAO,IAAI,QAAQ,CAAC,UAAU,CAAC,CAAC;KACxF;IACD,OAAO,QAAQ,CAAC,IAAI,EAAE,CAAC;AACzB,CAAC;AAED,KAAK,U
AAU,gBAAgB,CAAC,UAAoB;IAClD,8BAA8B;IAC9B,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;QACzB,OAAO,UAAU,CAAC,EAAE,GAAG,EAAE,iBAAiB,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;KACjF;IAED,aAAa;IACb,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3B,MAAM,SAAS,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,MAAM,KAAK,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QACpE,OAAO,UAAU,CAAC,EAAE,GAAG,EAAE,oCAAoC,kBAAkB,CAAC,KAAK,CAAC,cAAc,kBAAkB,CAAC,SAAS,CAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;KACvJ;IAED,2BAA2B;IAC3B,OAAO,UAAU,CAAC,EAAE,GAAG,EAAE,yBAAyB,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC;AAC1G,CAAC;AAED,SAAS,aAAa,CAAC,GAAW;IAChC,OAAO,6BAA6B,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;AAClD,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW;IACrC,OAAO,QAAQ,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC;AACxC,CAAC;AAED,MAAM,OAAO,sBAAsB;IAAnC;QACW,UAAK,GAAW,cAAc,CAAC,WAAW,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,0DAA0D,CAAC,CAAC;IAsD3H,CAAC;IApDC,SAAS,CAAC,GAAyB,EAAE,IAAiB;QACpD,OAAO,GAAG,YAAY,oBAAoB,IAAI,CAAC,IAAI,KAAK,WAAW,CAAC,IAAI,IAAI,IAAI,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC;IAC1G,CAAC;IAED,MAAM,CAAC,GAAyB;QAC9B,OAAO;YACL,QAAQ;YACR,MAAM,EAAE,CAAC,CAAkB,EAAE,CAAW,EAAE,EAAE;gBAC1C,IAAI,CAAC,gBAAgB,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE;oBAChC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE;wBACnB,CAAC,CAAC,KAAK,CAAC,eAAe,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;wBAC5D,OAAO,IAAI,CAAC;qBACb;oBACD,MAAM,KAAK,GAAG,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;oBAC/B,iCAAiC;oBACjC,OAAO,SAAS,CACd,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;wBACtB,MAAM,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,GAAG,CAAC,CAAC;oBAC/C,CAAC,CAAC,CACH,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE;wBACf,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;4BAC7B,OAAO;yBACR;wBACD,CAAC,CAAC,KAAK,CAAC,eAAe,GAAG,QAAQ,WAAW,CAAC,KAAK,EAAE,GAAG,CAAC,SAAS,EAAE,EAAE,MAAM,EAAE,GAAG,CAAC,QAAQ,EAAE,CAAC,IAAI,CAAC;wBAClG,CAAC,CAAC,KAAK,GAAG,KAAK,CAAC;oBAClB,CAAC,CAAC,CAAC;iBACJ;gBACD,OAAO,IAAI,CAAC;YACd,CAAC;SA
CF,CAAC;IACJ,CAAC;IAED,WAAW,CAAC,GAAyB,EAAE,OAAuB;QAC5D,OAAO;YACL,QAAQ;YACR,MAAM,EAAE,CAAC,CAAmB,EAAE,KAAoB,EAAE,EAAE;gBACpD,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC,GAAG,EAAE,KAAK,EAAE,6BAA6B,EAAE,CAAC,IAAI,EAAE,EAAE;oBAC1E,OAAO,SAAS,CAAC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,GAAQ,EAAE,EAAE;wBACrG,CAAC,CAAC,KAAK,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC,CAAC,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;oBAC/D,CAAC,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED,aAAa;QACX,cAAc;QACd,OAAO;YACL,QAAQ,EAAE,aAAa;YACvB,MAAM,EAAE,GAAG,EAAE,GAAE,CAAC;SACjB,CAAC;IACJ,CAAC;CACF"} \ No newline at end of file diff --git a/requirements_dev.txt b/requirements_dev.txt index 1e2ac037e..4193b14d3 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,8 +1,5 @@ black~=22.12.0 -debugpy~=1.6.4 -flake8~=6.0.0 -isort~=5.11.3 -pep8-naming~=0.13.3 pyright~=1.1.285 pytest-runner~=6.0.0 pytest~=7.2.0 +ruff==0.0.218 diff --git a/setup.py b/setup.py index 8b2a15e33..95445360f 100644 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ def requirements(file): package_data={}, # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. 
See: - # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa + # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # In this case, 'data_file' will be installed into '/my_data' data_files=[], # [('my_data', ['data/data_file'])], ) diff --git a/src/lineup/structureImage/StructureImageRenderer.ts b/src/lineup/structureImage/StructureImageRenderer.ts index 58a093fd7..fa165bd61 100644 --- a/src/lineup/structureImage/StructureImageRenderer.ts +++ b/src/lineup/structureImage/StructureImageRenderer.ts @@ -13,7 +13,7 @@ import { abortAble } from 'lineupengine'; import { StructureImageColumn } from './StructureImageColumn'; import { I18nextManager } from '../../i18n'; -const template = ''; +const template = '
'; function getImageURL(structure: string, substructure: string | null = null, align: string | null = null): string { return `/api/rdkit/?structure=${encodeURIComponent(structure)}${substructure ? `&substructure=${encodeURIComponent(substructure)}` : ''}${ @@ -94,7 +94,6 @@ export class StructureImageRenderer implements ICellRendererFactory { } n.style.backgroundImage = `url('${getImageURL(value, col.getFilter()?.filter, col.getAlign())}')`; n.title = value; - n.href = `https://pubchem.ncbi.nlm.nih.gov/#query=${value}`; }); } return null; diff --git a/tdp_core/dataset/dataset.py b/tdp_core/dataset/dataset.py index ecbf75033..cfcea8c2d 100644 --- a/tdp_core/dataset/dataset.py +++ b/tdp_core/dataset/dataset.py @@ -1,5 +1,4 @@ import itertools -from builtins import str from .. import manager @@ -41,7 +40,7 @@ def get(dataset_id): return None -def add(desc, files=[], id=None): +def add(desc, files=None, id=None): """ adds a new dataset to this storage :param desc: the dict description information @@ -49,6 +48,8 @@ def add(desc, files=[], id=None): :param id: optional the unique id to use :return: the newly created dataset or None if an error occurred """ + if files is None: + files = [] for p in _providers(): r = p.upload(desc, files, id) if r: @@ -56,7 +57,7 @@ def add(desc, files=[], id=None): return None -def update(dataset, desc, files=[]): +def update(dataset, desc, files=None): """ updates the given dataset :param dataset: a dataset or a dataset id @@ -64,6 +65,8 @@ def update(dataset, desc, files=[]): :param files: a list of FileStorage :return: """ + if files is None: + files = [] old = get(dataset) if isinstance(dataset, str) else dataset if old is None: return add(desc, files) @@ -80,7 +83,4 @@ def remove(dataset): old = get(dataset) if isinstance(dataset, str) else dataset if old is None: return False - for p in _providers(): - if p.remove(old): - return True - return False + return any(p.remove(old) for p in _providers()) diff --git 
a/tdp_core/dataset/dataset_api.py b/tdp_core/dataset/dataset_api.py index b69b3d3a2..a7f2471a8 100644 --- a/tdp_core/dataset/dataset_api.py +++ b/tdp_core/dataset/dataset_api.py @@ -1,5 +1,4 @@ import logging -from builtins import str from flask import Flask, abort, make_response, request from flask.wrappers import Response @@ -18,7 +17,7 @@ def on_value_error(error): _log.error("ValueError: (" + str(error) + ") at " + str(request.environ)) _log.error(error) return ( - "{2} - {0}
{1}
".format("ValueError", error, 500), + "{} - {}
{}
".format(500, "ValueError", error), 500, ) @@ -28,13 +27,13 @@ def _list_format_json(data): def _list_format_treejson(data): - r = dict() + r = {} for d in data: levels = d["fqname"].split("/") act = r for level in levels[:-1]: if level not in act: - act[level] = dict() + act[level] = {} act = act[level] act[d["name"]] = d return jsonify(r, indent=1) @@ -96,11 +95,11 @@ def _list_datasets(): data = data[:limit] format = request.args.get("format", "json") - formats = dict(json=_list_format_json, treejson=_list_format_treejson, csv=_list_format_csv) + formats = {"json": _list_format_json, "treejson": _list_format_treejson, "csv": _list_format_csv} if format not in formats: abort( make_response( - 'invalid format: "{0}" possible ones: {1}'.format(format, ",".join(list(formats.keys()))), + 'invalid format: "{}" possible ones: {}'.format(format, ",".join(list(formats.keys()))), 400, ) ) @@ -209,11 +208,11 @@ def _remove_dataset(dataset_id): r = remove(dataset_id) if r: return jsonify( - dict( - state="success", - msg="Successfully deleted dataset " + dataset_id, - id=dataset_id, - ), + { + "state": "success", + "msg": "Successfully deleted dataset " + dataset_id, + "id": dataset_id, + }, indent=1, ) return "invalid request", 400 diff --git a/tdp_core/dataset/dataset_def.py b/tdp_core/dataset/dataset_def.py index bb9d6b8e0..d45dc4615 100644 --- a/tdp_core/dataset/dataset_def.py +++ b/tdp_core/dataset/dataset_def.py @@ -1,5 +1,4 @@ import abc -from builtins import object from ..security import can_read, can_write from ..utils import fix_id @@ -12,10 +11,10 @@ def to_plural(s): def to_idtype_description(id): - return dict(id=id, name=id, names=to_plural(id)) + return {"id": id, "name": id, "names": to_plural(id)} -class ADataSetEntry(object, metaclass=abc.ABCMeta): +class ADataSetEntry(metaclass=abc.ABCMeta): """ A basic dataset entry """ @@ -43,7 +42,7 @@ def to_description(self): """ :return: a dictionary describing this dataset in a client understandable format """ - 
return dict(type=self.type, name=self.name, id=self.id, fqname=self.fqname) + return {"type": self.type, "name": self.name, "id": self.id, "fqname": self.fqname} def to_idtype_descriptions(self): """ @@ -84,7 +83,7 @@ def asjson(self): converts this dataset to a json compatible format :return: a json compatible dataset representation """ - return dict() + return {} def can_read(self, user=None): return can_read(self.to_description(), user) @@ -93,7 +92,7 @@ def can_write(self, user=None): return can_write(self.to_description(), user) -class ADataSetProvider(object, metaclass=abc.ABCMeta): +class ADataSetProvider(metaclass=abc.ABCMeta): def __len__(self): import itertools diff --git a/tdp_core/dataset/graph/graph.py b/tdp_core/dataset/graph/graph.py index 97d0900c7..6d2ab1815 100644 --- a/tdp_core/dataset/graph/graph.py +++ b/tdp_core/dataset/graph/graph.py @@ -1,21 +1,20 @@ import abc -from builtins import object from ... import manager from ..dataset_def import ADataSetEntry -class GraphNode(object): +class GraphNode: def __init__(self, t, id, attrs=None): self.type = t self.id = id self.attrs = {} if attrs is None else attrs def asjson(self): - return dict(type=self.type, id=self.id, attrs=self.attrs) + return {"type": self.type, "id": self.id, "attrs": self.attrs} -class GraphEdge(object): +class GraphEdge: def __init__(self, t, id, source=None, target=None, attrs=None): self.type = t self.id = id @@ -24,18 +23,18 @@ def __init__(self, t, id, source=None, target=None, attrs=None): self.attrs = {} if attrs is None else attrs def asjson(self): - return dict( - type=self.type, - id=self.id, - source=self.source, - target=self.target, - attrs=self.attrs, - ) + return { + "type": self.type, + "id": self.id, + "source": self.source, + "target": self.target, + "attrs": self.attrs, + } class AGraph(ADataSetEntry, metaclass=abc.ABCMeta): def __init__(self, name, project, id=None, attrs=None): - super(AGraph, self).__init__(name, project, "graph", id) + 
super().__init__(name, project, "graph", id) self.attrs = {} if attrs is None else attrs @abc.abstractmethod @@ -55,7 +54,7 @@ def nedges(self): return len(self.edges()) def to_description(self): - r = super(AGraph, self).to_description() + r = super().to_description() r["size"] = [self.nnodes, self.nedges] r["attrs"] = self.attrs return r @@ -64,7 +63,7 @@ def asjson(self): nodes = [a.asjson() for a in self.nodes()] edges = [a.asjson() for a in self.edges()] - r = dict(nodes=nodes, edges=edges) + r = {"nodes": nodes, "edges": edges} return r def add_node(self, data): diff --git a/tdp_core/dataset/graph/graph_api.py b/tdp_core/dataset/graph/graph_api.py index 6c1481971..c16b0a78c 100644 --- a/tdp_core/dataset/graph/graph_api.py +++ b/tdp_core/dataset/graph/graph_api.py @@ -25,7 +25,7 @@ def resolve_formatter(type, format): if p.format == format: # type: ignore return p.load() formats = ",".join(p.format for p in manager.registry.list(type + "-formatter")) # type: ignore - abort(400, 'unknown format "{0}" possible formats are: {1}'.format(format, formats)) + abort(400, 'unknown format "{}" possible formats are: {}'.format(format, formats)) def _list_items(dataset_getter, name, datasetid): diff --git a/tdp_core/db.py b/tdp_core/db.py index 53f3e67ff..8d29052a1 100644 --- a/tdp_core/db.py +++ b/tdp_core/db.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, List, Optional +from typing import Any from flask import abort from sqlalchemy.exc import OperationalError @@ -93,7 +93,7 @@ def to_query(q, supports_array_parameter, parameters): subparameters = {(k + str(i)): vi for i, vi in enumerate(v)} q = q.replace( ":" + k, - "({ids})".format(ids=", ".join(":" + p for p in subparameters.keys())), + "({ids})".format(ids=", ".join(":" + p for p in subparameters)), ) del parameters[k] # delete single parameters.update(subparameters) # add sub @@ -101,7 +101,7 @@ def to_query(q, supports_array_parameter, parameters): return sqlalchemy.sql.text(q) -class 
WrappedSession(object): +class WrappedSession: def __init__(self, engine): """ session wrapper of sql alchemy with auto cleanup @@ -189,9 +189,9 @@ def get_columns(engine, table_name): def _normalize_columns(col): from sqlalchemy import types - r = dict(label=col["name"], type="string", column=col["name"]) + r = {"label": col["name"], "type": "string", "column": col["name"]} t = col["type"] - if isinstance(t, types.Integer) or isinstance(t, types.Numeric): + if isinstance(t, (types.Integer, types.Numeric)): r["type"] = "number" elif isinstance(t, types.Enum): r["type"] = "categorical" @@ -236,7 +236,7 @@ def _handle_aggregated_score(base_view, config, replacements, args): return replacements -def prepare_arguments(view, config, replacements=None, arguments: Optional[Dict] = None, extra_sql_argument=None): +def prepare_arguments(view, config, replacements=None, arguments: dict | None = None, extra_sql_argument=None): """ prepares for the given view the kwargs and replacements based on the given input :param view: db view @@ -268,7 +268,7 @@ def prepare_arguments(view, config, replacements=None, arguments: Optional[Dict] parser = info.type if info and info.type is not None else lambda x: x try: if info and info.as_list: - vs: List[Any] = arguments.getlist(lookup_key) if hasattr(arguments, "getlist") else arguments.get(lookup_key) # type: ignore + vs: list[Any] = arguments.getlist(lookup_key) if hasattr(arguments, "getlist") else arguments.get(lookup_key) # type: ignore value = tuple([parser(v) for v in vs]) # multi values need to be a tuple not a list elif info and info.list_as_tuple: vs = arguments.getlist(lookup_key) if hasattr(arguments, "getlist") else arguments.get(lookup_key, []) # type: ignore @@ -300,10 +300,9 @@ def prepare_arguments(view, config, replacements=None, arguments: Optional[Dict] if view.replacements is not None: for arg in view.replacements: fallback = arguments.get(arg, "") - if arg in secure_replacements: # has to be part of the replacements - 
value = replacements.get(arg, "") - else: - value = replacements.get(arg, fallback) # if not a secure one fallback with an argument + value = ( + replacements.get(arg, "") if arg in secure_replacements else replacements.get(arg, fallback) + ) # if not a secure one fallback with an argument if not view.is_valid_replacement(arg, value): _log.warn( 'invalid replacement value detected "%s": "%s"="%s"', @@ -362,9 +361,9 @@ def get_query(database, view_name, replacements=None, arguments=None, extra_sql_ query = view.query if callable(query): - return dict(query="custom function", args=kwargs) + return {"query": "custom function", "args": kwargs} - return dict(query=clean_query(query.format(**replace)), args=kwargs) + return {"query": clean_query(query.format(**replace)), "args": kwargs} def get_filtered_data(database, view_name, args): @@ -456,9 +455,9 @@ def get_count_query(database, view_name, args): ) = _get_count(database, view_name, args) if callable(count_query): - return dict(query="custom function", args=kwargs) + return {"query": "custom function", "args": kwargs} - return dict(query=count_query.format(**replace), args=kwargs) + return {"query": count_query.format(**replace), "args": kwargs} def derive_columns(table_name, engine, columns=None): @@ -503,7 +502,7 @@ def derive_columns(table_name, engine, columns=None): separator = getattr(columns[col], "separator", ";") separated_categories = [category.split(separator) for category in categories] # flatten array - categories = list(set([category for sublist in separated_categories for category in sublist])) + categories = list({category for sublist in separated_categories for category in sublist}) categories.sort() # sort list to avoid random order with each run columns[col]["categories"] = categories @@ -528,7 +527,7 @@ def _lookup(database, view_name, query, page, limit, args): arguments["query_start"] = "{}%".format(query) arguments["query_match"] = "{}".format(query) # add 1 for checking if we have more - 
replacements = dict(limit=limit + 1, offset=offset, offset2=(offset + limit + 1)) + replacements = {"limit": limit + 1, "offset": offset, "offset2": (offset + limit + 1)} kwargs, replace = prepare_arguments(view, config, replacements, arguments) @@ -539,9 +538,9 @@ def lookup_query(database, view_name, query, page, limit, args): engine, _, sql, replace, kwargs = _lookup(database, view_name, query, page, limit, args) if callable(sql): - return dict(query="custom function", args=kwargs) + return {"query": "custom function", "args": kwargs} - return dict(query=sql.format(**replace), args=kwargs) + return {"query": sql.format(**replace), "args": kwargs} def lookup(database, view_name, query, page, limit, args): diff --git a/tdp_core/dbmanager.py b/tdp_core/dbmanager.py index ad5aba7b7..7dc284ab5 100644 --- a/tdp_core/dbmanager.py +++ b/tdp_core/dbmanager.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, Union +from typing import Any from fastapi import FastAPI from sqlalchemy.engine import Engine @@ -13,18 +13,18 @@ _log = logging.getLogger(__name__) -class DBManager(object): +class DBManager: def __init__(self): - self.connectors: Dict[str, DBConnector] = {} + self.connectors: dict[str, DBConnector] = {} self._plugins = {} - self._engines = dict() - self._sessionmakers = dict() + self._engines = {} + self._sessionmakers = {} def init_app(self, app: FastAPI): app.add_middleware(CloseWebSessionsMiddleware) for p in manager.registry.list("tdp-sql-database-definition"): - config: Dict[str, Any] = manager.settings.get_nested(p.configKey) # type: ignore + config: dict[str, Any] = manager.settings.get_nested(p.configKey) # type: ignore # Only instantiate the connector if it has a module factory, otherwise use an empty one connector: DBConnector = p.load().factory() if p.module else DBConnector() if not connector.dburl: @@ -70,7 +70,7 @@ def connector(self, item) -> DBConnector: raise NotImplementedError("missing db connector: " + item) return 
self.connectors[item] - def engine(self, item: Union[Engine, str]) -> Engine: + def engine(self, item: Engine | str) -> Engine: if isinstance(item, Engine): return item @@ -78,10 +78,10 @@ def engine(self, item: Union[Engine, str]) -> Engine: raise NotImplementedError("missing db connector: " + item) return self._load_engine(item) - def create_session(self, engine_or_id: Union[Engine, str]) -> Session: + def create_session(self, engine_or_id: Engine | str) -> Session: return self._sessionmakers[self.engine(engine_or_id)]() - def create_web_session(self, engine_or_id: Union[Engine, str]) -> Session: + def create_web_session(self, engine_or_id: Engine | str) -> Session: """ Create a session that is added to the request state as db_session, which automatically closes it in the db_session middleware. """ diff --git a/tdp_core/dbmigration/manager.py b/tdp_core/dbmigration/manager.py index a4f473b31..9cecbb300 100644 --- a/tdp_core/dbmigration/manager.py +++ b/tdp_core/dbmigration/manager.py @@ -2,7 +2,7 @@ import re from argparse import REMAINDER from os import path -from typing import Any, Dict, List, Optional +from typing import Any import alembic.command import alembic.config @@ -18,7 +18,7 @@ alembic_cfg = alembic.config.Config(path.join(path.abspath(path.dirname(__file__)), "dbmigration.ini")) -class DBMigration(object): +class DBMigration: """ DBMigration object stores the required arguments to execute commands using Alembic. """ @@ -30,7 +30,7 @@ def __init__( script_location: str, *, auto_upgrade: bool = False, - version_table_schema: Optional[str] = None, + version_table_schema: str | None = None, ): """ Initializes a new migration object and optionally carries out an upgrade. 
@@ -47,8 +47,8 @@ def __init__( self.db_url: str = db_url self.script_location: str = script_location self.auto_upgrade: bool = auto_upgrade - self.version_table_schema: Optional[str] = version_table_schema - self.custom_commands: Dict[str, str] = dict() + self.version_table_schema: str | None = version_table_schema + self.custom_commands: dict[str, str] = {} # Because we can't easily pass "-1" as npm argument, we add a custom command for that without the space self.add_custom_command(r"downgrade-(\d+)", "downgrade -{}") @@ -84,7 +84,7 @@ def add_custom_command(self, pattern: str, target: str): def remove_custom_command(self, origin: str): self.custom_commands.pop(origin, None) - def get_custom_command(self, arguments: List[str] = []) -> Optional[List[str]]: + def get_custom_command(self, arguments: list[str] | None = None) -> list[str] | None: """ Returns the rewritten command if it matches the pattern of a custom command. :param List[str] arguments: Argument to rewrite. @@ -101,13 +101,15 @@ def get_custom_command(self, arguments: List[str] = []) -> Optional[List[str]]: return value.format(*matched.groups()).split(" ") return None - def execute(self, arguments: List[str] = []) -> bool: + def execute(self, arguments: list[str] | None = None) -> bool: """ Executes a command on the migration object. :param List[str] arguments: Arguments for the underlying Alembic instance. See https://alembic.sqlalchemy.org/en/latest/api/ for details. Example usage: migration.execute(['upgrade', 'head']) upgrades to the database to head. """ + if arguments is None: + arguments = [] # Rewrite command if possible rewritten_arguments = self.get_custom_command(arguments) if rewritten_arguments: @@ -134,7 +136,7 @@ def execute(self, arguments: List[str] = []) -> bool: return True -class DBMigrationManager(object): +class DBMigrationManager: """ DBMigrationManager retrieves all 'tdp-sql-database-migration' plugins and initializes DBMigration objects. 
The possible configuration keys for this extension point are: @@ -153,9 +155,11 @@ class DBMigrationManager(object): """ def __init__(self): - self._migrations: Dict[str, DBMigration] = dict() + self._migrations: dict[str, DBMigration] = {} - def init_app(self, app: FastAPI, plugins: List[AExtensionDesc] = []): + def init_app(self, app: FastAPI, plugins: list[AExtensionDesc] | None = None): + if plugins is None: + plugins = [] _log.info(f"Initializing DBMigrationManager with {', '.join([p.id for p in plugins]) or 'no plugins'}") auto_upgrade_default = manager.settings.tdp_core.migrations.autoUpgrade @@ -236,11 +240,11 @@ def __len__(self): return len(self._migrations) @property - def ids(self) -> List[str]: + def ids(self) -> list[str]: return list(self._migrations.keys()) @property - def migrations(self) -> List[DBMigration]: + def migrations(self) -> list[DBMigration]: return list(self._migrations.values()) diff --git a/tdp_core/dbview.py b/tdp_core/dbview.py index a96bb37a1..4dac07bc7 100644 --- a/tdp_core/dbview.py +++ b/tdp_core/dbview.py @@ -1,7 +1,7 @@ import logging import re from collections import OrderedDict -from typing import Any, Optional +from typing import Any import sqlalchemy from sqlalchemy.engine import Engine @@ -15,7 +15,7 @@ REGEX_TYPE = type(re.compile("")) -class ArgumentInfo(object): +class ArgumentInfo: def __init__( self, type=None, @@ -33,14 +33,14 @@ def __init__( self.list_as_tuple = list_as_tuple -class DBFilterData(object): +class DBFilterData: def __init__(self, group, sub_query, join): self.group = group self.sub_query = sub_query self.join = join -class DBView(object): +class DBView: def __init__(self, idtype=None, query=None): self.description = "" self.summary = "" @@ -69,7 +69,7 @@ def dump(self, name): r: OrderedDict[str, Any] = OrderedDict(name=name, description=self.description, type=self.query_type) r["idType"] = self.idtype r["query"] = clean_query(self.query) - args = [a for a in self.arguments] + args = 
list(self.arguments) args.extend(self.replacements) r["arguments"] = args r["columns"] = list(self.columns.values()) if self.columns else [] # some views have no columns -> return empty array @@ -106,7 +106,7 @@ def get_filter_subjoin(self, key): return None def filter_groups(self): - r = set([v.group for v in list(self.filters.values())]) + r = {v.group for v in list(self.filters.values())} if None in r: r.remove(None) return r @@ -163,7 +163,7 @@ def can_access(self, check_default_security=False): return is_logged_in() # because security is not disabled check if the user is at least logged in -class DBViewBuilder(object): +class DBViewBuilder: """ db view builder pattern implementation """ @@ -585,7 +585,7 @@ def add_common_queries( default_agg_score = DBViewBuilder().query("{agg}({data_subtype})").replace("agg", ["min", "max", "avg"]).replace("data_subtype").build() -class DBMapping(object): +class DBMapping: """ simple mapping based on a query of the form `select from_id as f, to_id as t from mapping_table where f in :ids` """ @@ -597,34 +597,34 @@ def __init__(self, from_idtype, to_idtype, query, integer_ids=False): self.integer_ids = integer_ids -class DBConnector(object): +class DBConnector: """ basic connector object """ - def __init__(self, views={}, agg_score=None, mappings=None): + def __init__(self, views=None, agg_score=None, mappings=None): """ :param views: the dict of query views :param agg_score: optional specify how aggregation should be handled :param mappings: optional database mappings """ self.agg_score = agg_score or default_agg_score - self.views = views + self.views = views or {} self.dburl: str = None # type: ignore self.mappings = mappings self.statement_timeout = None - self.statement_timeout_query: Optional[str] = None + self.statement_timeout_query: str | None = None self.description = "" def dump(self, name): return OrderedDict(name=name, description=self.description) def create_engine(self, config) -> Engine: - engine_options = dict( 
+ engine_options = { # Increase the pool size to 30 to avoid "too many clients" errors - pool_size=30, - pool_pre_ping=True, - ) + "pool_size": 30, + "pool_pre_ping": True, + } engine_options.update(config.get("engine", {})) return sqlalchemy.create_engine(self.dburl, **engine_options) diff --git a/tdp_core/encoder/bytes_to_string_encoder.py b/tdp_core/encoder/bytes_to_string_encoder.py index 5d54b8b1b..b9f95a90f 100644 --- a/tdp_core/encoder/bytes_to_string_encoder.py +++ b/tdp_core/encoder/bytes_to_string_encoder.py @@ -4,7 +4,7 @@ """ -class BytesToStringEncoder(object): +class BytesToStringEncoder: def __contains__(self, obj): if isinstance(obj, bytes): return True diff --git a/tdp_core/encoder/json_encoder.py b/tdp_core/encoder/json_encoder.py index b9910f410..77d895daa 100644 --- a/tdp_core/encoder/json_encoder.py +++ b/tdp_core/encoder/json_encoder.py @@ -1,12 +1,11 @@ import datetime as dt import decimal -from builtins import object, range import numpy as np # type: ignore import numpy.ma as ma # type: ignore -class NumpyTablesEncoder(object): +class NumpyTablesEncoder: def __contains__(self, obj): if isinstance(obj, np.ndarray): # type: ignore return True diff --git a/tdp_core/encoder/set_encoder.py b/tdp_core/encoder/set_encoder.py index 2f4072283..16c65aadf 100644 --- a/tdp_core/encoder/set_encoder.py +++ b/tdp_core/encoder/set_encoder.py @@ -3,7 +3,7 @@ """ -class SetEncoder(object): +class SetEncoder: def __contains__(self, obj): return isinstance(obj, set) diff --git a/tdp_core/formatter.py b/tdp_core/formatter.py index 0bc9f209d..49bb3c569 100644 --- a/tdp_core/formatter.py +++ b/tdp_core/formatter.py @@ -7,8 +7,8 @@ def _format_csv(array_of_dicts): try: import pandas as pd # type: ignore - except ImportError: - raise ImportError("pandas is required to format as csv") + except ImportError as e: + raise ImportError("pandas is required to format as csv") from e if not array_of_dicts: return Response("", mimetype="text/csv") diff --git 
a/tdp_core/graph.py b/tdp_core/graph.py index a76338798..96cebe143 100644 --- a/tdp_core/graph.py +++ b/tdp_core/graph.py @@ -6,13 +6,13 @@ class MongoGraph(graph.AGraph): def __init__(self, entry, db): - super(MongoGraph, self).__init__(entry["name"], "mongodb", entry.get("id", None), entry.get("attrs", None)) + super().__init__(entry["name"], "mongodb", entry.get("id", None), entry.get("attrs", None)) self._entry = entry self._db = db from bson.objectid import ObjectId - self._find_me = dict(_id=self._entry["_id"]) - self._find_data = dict(_id=ObjectId(self._entry["refid"])) + self._find_me = {"_id": self._entry["_id"]} + self._find_data = {"_id": ObjectId(self._entry["refid"])} self._nodes = None self._edges = None @@ -34,15 +34,15 @@ def create(data, user, id, db): import datetime - entry: dict[str, int | datetime.datetime | str] = dict( - name=data["name"], - description=data.get("description", ""), - creator=user.name, - nnodes=len(data["nodes"]), - nedges=len(data["edges"]), - attrs=data.get("attrs", {}), - ts=datetime.datetime.utcnow(), - ) + entry: dict[str, int | datetime.datetime | str] = { + "name": data["name"], + "description": data.get("description", ""), + "creator": user.name, + "nnodes": len(data["nodes"]), + "nedges": len(data["edges"]), + "attrs": data.get("attrs", {}), + "ts": datetime.datetime.utcnow(), + } if "group" in data: entry["group"] = data["group"] @@ -53,7 +53,7 @@ def create(data, user, id, db): if id is not None: entry["id"] = id - data_entry = dict(nodes=data["nodes"], edges=data["edges"]) + data_entry = {"nodes": data["nodes"], "edges": data["edges"]} data_id = db.graph_data.insert_one(data_entry).inserted_id entry["refid"] = str(data_id) @@ -84,7 +84,7 @@ def nedges(self): return self._entry["nedges"] def to_description(self): - r = super(MongoGraph, self).to_description() + r = super().to_description() if self._entry is not None: r["description"] = self._entry["description"] @@ -102,8 +102,8 @@ def to_description(self): def 
add_node(self, data): if not self.can_write(): return False - self._db.graph.update(self._find_me, {"$inc": dict(nnodes=1)}) - self._db.graph_data.update(self._find_data, {"$push": dict(nodes=data)}) + self._db.graph.update(self._find_me, {"$inc": {"nnodes": 1}}) + self._db.graph_data.update(self._find_data, {"$push": {"nodes": data}}) self._entry["nnodes"] += 1 if self._nodes: self._nodes.append(graph.GraphNode(data["type"], data["id"], data.get("attrs", None))) @@ -134,11 +134,11 @@ def remove_node(self, id): self._nodes.remove(n) self._entry["nnodes"] -= 1 # remove node and all associated edges - self._db.graph_data.update(self._find_data, {"$pull": dict(nodes=dict(id=id))}, multi=False) + self._db.graph_data.update(self._find_data, {"$pull": {"nodes": {"id": id}}}, multi=False) self._db.graph_data.update( self._find_data, - {"$pull": dict(edges={"$or": [dict(source=id), dict(target=id)]})}, + {"$pull": {"edges": {"$or": [{"source": id}, {"target": id}]}}}, multi=True, ) @@ -150,7 +150,7 @@ def remove_node(self, id): self._entry["nedges"] = len(self._db.graph_data.find_one(self._find_data, {"edges": 1})["edges"]) self._db.graph.update( self._find_me, - {"$inc": dict(nnodes=-1), "$set": dict(nedges=self._entry["nedges"])}, + {"$inc": {"nnodes": -1}, "$set": {"nedges": self._entry["nedges"]}}, ) return True @@ -170,8 +170,8 @@ def get_edge(self, id): def clear(self): if not self.can_write(): return False - self._db.graph.update(self._find_me, {"$set": dict(nnodes=0, nedges=0)}) - self._db.graph_data.update(self._find_data, {"$set": dict(nodes=[], edges=[])}) + self._db.graph.update(self._find_me, {"$set": {"nnodes": 0, "nedges": 0}}) + self._db.graph_data.update(self._find_data, {"$set": {"nodes": [], "edges": []}}) self._nodes = None self._edges = None self._entry["nnodes"] = 0 @@ -181,8 +181,8 @@ def clear(self): def add_edge(self, data): if not self.can_write(): return False - self._db.graph.update(self._find_me, {"$inc": dict(nedges=1)}) - 
self._db.graph_data.update(self._find_data, {"$push": dict(edges=data)}) + self._db.graph.update(self._find_me, {"$inc": {"nedges": 1}}) + self._db.graph_data.update(self._find_data, {"$push": {"edges": data}}) self._entry["nedges"] += 1 if self._edges: self._edges.append( @@ -220,8 +220,8 @@ def remove_edge(self, id): if n: self._edges.remove(n) self._entry["nedges"] -= 1 - self._db.graph.update(self._find_me, {"$inc": dict(nedges=-1)}) - self._db.graph_data.update(self._find_data, {"$pull": dict(edges=dict(id=id))}) + self._db.graph.update(self._find_me, {"$inc": {"nedges": -1}}) + self._db.graph_data.update(self._find_data, {"$pull": {"edges": {"id": id}}}) return True def remove(self): @@ -293,7 +293,7 @@ def remove(self, entry): return False def upload(self, data, files, id=None): - if not data.get("type", "unknown") == "graph": + if data.get("type", "unknown") != "graph": return None # can't handle from tdp_core.security import current_user diff --git a/tdp_core/id_mapping/idtype_api.py b/tdp_core/id_mapping/idtype_api.py index 8daa9171b..d9ab8c711 100644 --- a/tdp_core/id_mapping/idtype_api.py +++ b/tdp_core/id_mapping/idtype_api.py @@ -12,7 +12,7 @@ @app_idtype.route("/") def _list_idtypes(): - tmp = dict() + tmp = {} # TODO: We probably don't want to have these idtypes as "all" idtypes # for d in list_datasets(): # for idtype in d.to_idtype_descriptions(): diff --git a/tdp_core/id_mapping/manager.py b/tdp_core/id_mapping/manager.py index d0d87ef68..e1ab96c1a 100644 --- a/tdp_core/id_mapping/manager.py +++ b/tdp_core/id_mapping/manager.py @@ -1,14 +1,13 @@ import logging -from builtins import object, set +from builtins import set from itertools import chain -from typing import List from .. import manager _log = logging.getLogger(__name__) -class MappingManager(object): +class MappingManager: """ Mapping manager creating a graph of all available id-2-id mappings, allowing for transitive id-mappings. 
This graph is traversed via shortest path when mapping from one id-(type) to another. @@ -52,11 +51,13 @@ def known_idtypes(self): s.add(to_) return s - def __find_all_paths(self, graph, start, end, path=[]): + def __find_all_paths(self, graph, start, end, path=None): """ Returns all possible paths in the graph from start to end :return: Array of all possible paths (string arrays) sorted by shortest path first """ + if path is None: + path = [] path = path + [start] if start == end: return [path] @@ -77,7 +78,7 @@ def __resolve_single(self, from_idtype, to_idtype, ids) -> list: _log.warn("cannot find mapping from %s to %s", from_idtype, to_idtype) return [None for _ in ids] - def apply_mapping(mapper, ids: List[str]): + def apply_mapping(mapper, ids: list[str]): # Each mapper can define if it preserves the order of the incoming ids. if hasattr(mapper, "preserves_order") and mapper.preserves_order: return mapper(ids) @@ -94,7 +95,7 @@ def apply_mapping(mapper, ids: List[str]): rset = [set() for _ in ids] for mapper in to_mappings: mapped_ids = apply_mapping(mapper, ids) - for mapped_id, rlist, rhash in zip(mapped_ids, r, rset): + for mapped_id, rlist, rhash in zip(mapped_ids, r, rset, strict=False): for id in mapped_id: if id not in rhash: rlist.append(id) @@ -109,7 +110,8 @@ def merge_2d_arrays(self, source, lengths): """ if len(lengths) == 0 and len(source) == 0: return [] - assert len(lengths) > 0 and min(lengths) >= 1 + assert len(lengths) > 0 + assert min(lengths) >= 1 assert sum(lengths) == len(source) result = [] i = 0 diff --git a/tdp_core/mapping_table.py b/tdp_core/mapping_table.py index d7ef847d3..8aa98e52d 100644 --- a/tdp_core/mapping_table.py +++ b/tdp_core/mapping_table.py @@ -7,7 +7,7 @@ _log = logging.getLogger(__name__) -class SQLMappingTable(object): +class SQLMappingTable: def __init__(self, mapping: DBMapping, engine): self.from_idtype = mapping.from_idtype self.to_idtype = mapping.to_idtype @@ -43,7 +43,7 @@ def _discover_mappings(): yield 
SQLMappingTable(mapping, engine) -class SQLMappingProvider(object): +class SQLMappingProvider: def __init__(self): self._mappings = list(_discover_mappings()) diff --git a/tdp_core/middleware/close_web_sessions_middleware.py b/tdp_core/middleware/close_web_sessions_middleware.py index dc4b5c9e8..3998ba552 100644 --- a/tdp_core/middleware/close_web_sessions_middleware.py +++ b/tdp_core/middleware/close_web_sessions_middleware.py @@ -1,3 +1,5 @@ +import contextlib + from fastapi import FastAPI from .request_context_plugin import get_request @@ -18,11 +20,7 @@ async def __call__(self, scope, receive, send): r = get_request() if r: - try: + with contextlib.suppress(KeyError, AttributeError): for db_session in r.state.db_sessions: - try: + with contextlib.suppress(Exception): db_session.close() - except Exception: - pass - except (KeyError, AttributeError): - pass diff --git a/tdp_core/middleware/request_context_plugin.py b/tdp_core/middleware/request_context_plugin.py index ed2b65b33..e24d308fa 100644 --- a/tdp_core/middleware/request_context_plugin.py +++ b/tdp_core/middleware/request_context_plugin.py @@ -1,5 +1,3 @@ -from typing import Optional - from starlette.requests import HTTPConnection, Request from starlette_context import context from starlette_context.plugins.base import Plugin @@ -13,5 +11,5 @@ class RequestContextPlugin(Plugin): # The returned value will be inserted in the context with this key key = "request" - async def process_request(self, request: Request | HTTPConnection) -> Optional[Request | HTTPConnection]: + async def process_request(self, request: Request | HTTPConnection) -> Request | HTTPConnection | None: return request diff --git a/tdp_core/mol_img/img_api.py b/tdp_core/mol_img/img_api.py index cb3f082db..803438667 100644 --- a/tdp_core/mol_img/img_api.py +++ b/tdp_core/mol_img/img_api.py @@ -1,12 +1,15 @@ -from typing import List, Optional, Set - from fastapi import APIRouter from rdkit.Chem import Mol # type: ignore from 
rdkit.Chem.Scaffolds import MurckoScaffold from starlette.responses import Response from starlette.status import HTTP_204_NO_CONTENT -from .models import SmilesMolecule, SmilesSmartsMolecule, SubstructuresResponse, SvgResponse +from .models import ( + SmilesMolecule, + SmilesSmartsMolecule, + SubstructuresResponse, + SvgResponse, +) from .util.draw import draw, draw_similarity from .util.molecule import aligned, maximum_common_substructure_query_mol @@ -14,14 +17,12 @@ @app.get("/", response_class=SvgResponse) -def draw_smiles( - structure: SmilesMolecule, substructure: Optional[SmilesMolecule] = None, align: Optional[SmilesMolecule] = None -): # noqa: E1127 +def draw_smiles(structure: SmilesMolecule, substructure: SmilesMolecule | None = None, align: SmilesMolecule | None = None): return draw(structure.mol, aligned(structure.mol, align and align.mol) or substructure and substructure.mol) @app.post("/") -def multiple_images(structures: Set[SmilesMolecule]): +def multiple_images(structures: set[SmilesMolecule]): return {m: draw(m.mol) for m in structures} @@ -43,7 +44,7 @@ def draw_molecule_similarity(structure: SmilesMolecule, reference: SmilesMolecul @app.post("/mcs/", response_class=SvgResponse) -def draw_maximum_common_substructure_molecule(structures: List[SmilesMolecule]): +def draw_maximum_common_substructure_molecule(structures: list[SmilesMolecule]): unique = [m.mol for m in set(structures)] mcs = maximum_common_substructure_query_mol(unique) if not mcs or not isinstance(mcs, Mol): @@ -52,7 +53,7 @@ def draw_maximum_common_substructure_molecule(structures: List[SmilesMolecule]): @app.post("/substructures/") -def substructures_count(structures: Set[SmilesMolecule], substructure: SmilesSmartsMolecule) -> SubstructuresResponse: +def substructures_count(structures: set[SmilesMolecule], substructure: SmilesSmartsMolecule) -> SubstructuresResponse: """Check and return number of possible substructures in a set of structures""" ssr = SubstructuresResponse() for 
smiles in set(structures): diff --git a/tdp_core/mol_img/models.py b/tdp_core/mol_img/models.py index 21bd3cf6a..770cc6db9 100644 --- a/tdp_core/mol_img/models.py +++ b/tdp_core/mol_img/models.py @@ -1,5 +1,3 @@ -from typing import Dict, Optional - from pydantic import BaseModel from rdkit.Chem import Mol, MolFromSmarts, MolFromSmiles # type: ignore from starlette.responses import Response @@ -20,7 +18,7 @@ def __get_validators__(cls): yield cls.validate @classmethod - def validate(cls, value: Optional[str]) -> "SmilesMolecule": + def validate(cls, value: str | None) -> "SmilesMolecule": for parser in cls.parsers: mol = parser(value) if mol: @@ -42,5 +40,5 @@ class SvgResponse(Response): class SubstructuresResponse(BaseModel): - count: Dict[str, int] = dict() - valid: Dict[str, bool] = dict() + count: dict[str, int] = {} + valid: dict[str, bool] = {} diff --git a/tdp_core/mol_img/util/molecule.py b/tdp_core/mol_img/util/molecule.py index 3b2a67ba8..7d45d90bd 100644 --- a/tdp_core/mol_img/util/molecule.py +++ b/tdp_core/mol_img/util/molecule.py @@ -1,9 +1,7 @@ -from typing import List, Optional - from rdkit.Chem import Mol, TemplateAlign, rdFMCS # type: ignore -def maximum_common_substructure_query_mol(mols: List[Mol]) -> Optional[Mol]: +def maximum_common_substructure_query_mol(mols: list[Mol]) -> Mol | None: """https://www.rdkit.org/docs/GettingStartedInPython.html#maximum-common-substructure""" return rdFMCS.FindMCS(mols, matchValences=True, ringMatchesRingOnly=True, completeRingsOnly=True).queryMol diff --git a/tdp_core/plugin/model.py b/tdp_core/plugin/model.py index 95a7c34a6..952f4c525 100644 --- a/tdp_core/plugin/model.py +++ b/tdp_core/plugin/model.py @@ -1,11 +1,11 @@ from abc import ABC, abstractmethod -from typing import Any, Dict, Optional, Type +from typing import Any from fastapi import FastAPI from pydantic import BaseModel -class RegHelper(object): +class RegHelper: def __init__(self, plugin): self._items = [] self._plugin = plugin @@ -13,15 +13,15 
@@ def __init__(self, plugin): def __iter__(self): return iter(self._items) - def append(self, type_: str, id_: str, module_: Any, desc: Dict[str, Any] = {}): - desc = {} if not desc else desc + def append(self, type_: str, id_: str, module_: Any, desc: dict[str, Any] | None = None): + desc = desc if desc else {} desc["type"] = type_ desc["id"] = id_ desc["module"] = module_ desc["plugin"] = self._plugin self._items.append(desc) - def append_router(self, id_: str, module_: Any, desc: Dict[str, Any]): + def append_router(self, id_: str, module_: Any, desc: dict[str, Any]): # TODO: Validate desc return self.append("fastapi_router", id_, module_, desc) @@ -31,9 +31,9 @@ class AVisynPlugin(ABC): def register(self, registry: RegHelper): pass - def init_app(self, app: FastAPI): + def init_app(self, app: FastAPI): # noqa: B027 pass @property - def setting_class(self) -> Optional[Type[BaseModel]]: + def setting_class(self) -> type[BaseModel] | None: return None diff --git a/tdp_core/plugin/parser.py b/tdp_core/plugin/parser.py index add01d918..18d15fdf2 100644 --- a/tdp_core/plugin/parser.py +++ b/tdp_core/plugin/parser.py @@ -2,7 +2,6 @@ import logging from functools import cached_property, lru_cache from importlib.metadata import EntryPoint, entry_points -from typing import Dict, List, Tuple, Type from pydantic import BaseModel @@ -43,7 +42,7 @@ def check(disable): return any(map(check, manager.settings.tdp_core.disable.extensions)) -class EntryPointPlugin(object): +class EntryPointPlugin: def __init__(self, entry_point: EntryPoint): self.entry_point = entry_point self.id = entry_point.name @@ -59,14 +58,14 @@ def is_app(): @cached_property def plugin(self) -> AVisynPlugin: - visyn_plugin_clazz: Type[AVisynPlugin] = self.entry_point.load() + visyn_plugin_clazz: type[AVisynPlugin] = self.entry_point.load() if not issubclass(visyn_plugin_clazz, AVisynPlugin): raise Exception("Entrypoint plugin {self.id} does not load a proper class extending AVisynPlugin") return 
visyn_plugin_clazz() - @lru_cache + @lru_cache # noqa: B019 def get_module(self): return importlib.import_module(self.id) @@ -79,9 +78,9 @@ def _find_entry_point_plugins(): return [EntryPointPlugin(entry_point) for entry_point in entry_points(group="visyn.plugin")] -def load_all_plugins() -> List[EntryPointPlugin]: +def load_all_plugins() -> list[EntryPointPlugin]: # Load all plugins found via entry points - plugins: List[EntryPointPlugin] = [p for p in _find_entry_point_plugins() if not is_disabled_plugin(p)] + plugins: list[EntryPointPlugin] = [p for p in _find_entry_point_plugins() if not is_disabled_plugin(p)] plugins.sort(key=lambda p: p.id) _log.info(f"Discovered {len(plugins)} plugin(s): {', '.join([d.id for d in plugins])}") @@ -89,7 +88,7 @@ def load_all_plugins() -> List[EntryPointPlugin]: return plugins -def get_extensions_from_plugins(plugins: List[EntryPointPlugin]) -> List: +def get_extensions_from_plugins(plugins: list[EntryPointPlugin]) -> list: server_extensions = [] for plugin in plugins: reg = RegHelper(plugin) @@ -102,12 +101,12 @@ def get_extensions_from_plugins(plugins: List[EntryPointPlugin]) -> List: return server_extensions -def get_config_from_plugins(plugins: List[EntryPointPlugin]) -> Tuple[List[Dict[str, Dict]], Dict[str, Type[BaseModel]]]: +def get_config_from_plugins(plugins: list[EntryPointPlugin]) -> tuple[list[dict[str, dict]], dict[str, type[BaseModel]]]: # from ..settings.utils import load_config_file # With all the plugins, load the corresponding configuration files and add them to the global config - files: List[Dict[str, Dict]] = [] - models: Dict[str, Type[BaseModel]] = {} + files: list[dict[str, dict]] = [] + models: dict[str, type[BaseModel]] = {} for plugin in plugins: plugin_settings_model = plugin.plugin.setting_class if plugin_settings_model: diff --git a/tdp_core/plugin/registry.py b/tdp_core/plugin/registry.py index 66c67bf21..edb64da91 100644 --- a/tdp_core/plugin/registry.py +++ b/tdp_core/plugin/registry.py @@ -1,6 
+1,4 @@ import logging -from builtins import object -from typing import List from fastapi import FastAPI @@ -9,7 +7,7 @@ _log = logging.getLogger(__name__) -class Extension(object): +class Extension: """ the loaded plugin instance """ @@ -28,10 +26,7 @@ def __call__(self, *args, **kwargs): m = getattr(self.impl, self.desc.factory) - if hasattr(m, "__call__"): - v = m(*args, **kwargs) - else: - v = m + v = m(*args, **kwargs) if callable(m) else m self._cache = v return v @@ -39,7 +34,7 @@ def factory(self, *args, **kwargs): return self(*args, **kwargs) -class AExtensionDesc(object): +class AExtensionDesc: def __init__(self, desc): self.type = desc.get("type", "unknown") self.id = desc["id"] @@ -59,7 +54,7 @@ class ExtensionDesc(AExtensionDesc): """ def __init__(self, desc): - super(ExtensionDesc, self).__init__(desc) + super().__init__(desc) self._impl = None # from js notation to python notation @@ -74,12 +69,12 @@ def load(self): return self._impl -class Registry(object): +class Registry: def __init__(self): - self.plugins: List[EntryPointPlugin] = [] - self._extensions: List[ExtensionDesc] = [] + self.plugins: list[EntryPointPlugin] = [] + self._extensions: list[ExtensionDesc] = [] - def init_app(self, app: FastAPI, plugins: List[EntryPointPlugin]): + def init_app(self, app: FastAPI, plugins: list[EntryPointPlugin]): self.plugins = plugins self._extensions = [ExtensionDesc(p) for p in get_extensions_from_plugins(plugins)] @@ -92,9 +87,7 @@ def __getitem__(self, item): def __iter__(self): return iter(self._extensions) - def list(self, plugin_type=None) -> List[ExtensionDesc] | "Registry": - if plugin_type is None: - return self - if not hasattr(plugin_type, "__call__"): # not a callable + def list(self, plugin_type) -> list[ExtensionDesc]: + if not callable(plugin_type): return [x for x in self if x.type == plugin_type] return [x for x in self if plugin_type(x)] diff --git a/tdp_core/plugin/router.py b/tdp_core/plugin/router.py index e2a93cc60..cd6cf1427 100644 
--- a/tdp_core/plugin/router.py +++ b/tdp_core/plugin/router.py @@ -28,7 +28,7 @@ def get_plugins(): "description": e.description, "version": e.version, } - for e in manager.registry.list() + for e in manager.registry ], } diff --git a/tdp_core/proxy.py b/tdp_core/proxy.py index 1bf05d79a..e6501321d 100644 --- a/tdp_core/proxy.py +++ b/tdp_core/proxy.py @@ -15,7 +15,7 @@ def _to_site_url(site): proxy_defs = manager.registry.list("tdp_proxy") for p in proxy_defs: if p.id == site: - headers = getattr(p, "headers") if hasattr(p, "headers") else dict() + headers = p.headers if hasattr(p, "headers") else {} # type: ignore return p.url.format(**request.args.to_dict()), headers # type: ignore # none matching found return None, None diff --git a/tdp_core/security/__init__.py b/tdp_core/security/__init__.py index e0c590c78..84b475cec 100644 --- a/tdp_core/security/__init__.py +++ b/tdp_core/security/__init__.py @@ -2,9 +2,21 @@ from functools import wraps from ..formatter import formatter -from .manager import current_user, current_username, is_logged_in, login_required # NOQA +from .manager import ( # NOQA + current_user, + current_username, + is_logged_in, + login_required, +) from .model import User # NOQA -from .permissions import DEFAULT_PERMISSION, _includes, can, can_execute, can_read, can_write # NOQA +from .permissions import ( # NOQA + DEFAULT_PERMISSION, + _includes, + can, + can_execute, + can_read, + can_write, +) # custom login_required decorator to be able to disable the login for DBViews, i.e. 
to make them public diff --git a/tdp_core/security/manager.py b/tdp_core/security/manager.py index 40515c2b1..f38629144 100644 --- a/tdp_core/security/manager.py +++ b/tdp_core/security/manager.py @@ -1,8 +1,9 @@ +import contextlib import logging from base64 import b64decode from datetime import datetime, timedelta, timezone from functools import wraps -from typing import Callable, Dict, List, Optional, Tuple +from typing import Callable import jwt from fastapi import FastAPI, HTTPException, Request, Response, status @@ -16,7 +17,7 @@ _log = logging.getLogger(__name__) -def user_to_access_token(user: User) -> Tuple[str, Dict]: +def user_to_access_token(user: User) -> tuple[str, dict]: # Define access token data payload = {} @@ -40,19 +41,19 @@ def user_to_access_token(user: User) -> Tuple[str, Dict]: return jwt.encode(payload, manager.settings.secret_key, algorithm=manager.settings.jwt_algorithm), payload -def access_token_to_payload(token: str) -> Dict: +def access_token_to_payload(token: str) -> dict: return jwt.decode(token, manager.settings.secret_key, algorithms=[manager.settings.jwt_algorithm]) -def access_token_to_user(token: str) -> Optional[User]: +def access_token_to_user(token: str) -> User | None: payload = access_token_to_payload(token) - username: Optional[str] = payload.get("sub") + username: str | None = payload.get("sub") if not username: return None return User(id=username, access_token=token, roles=payload.get("roles", [])) -def user_to_dict(user: User, access_token: Optional[str] = None, payload: Optional[Dict] = None) -> Dict: +def user_to_dict(user: User, access_token: str | None = None, payload: dict | None = None) -> dict: if not payload and access_token: payload = access_token_to_payload(access_token) @@ -80,11 +81,11 @@ def add_access_token_to_response(response: Response, access_token: str) -> Respo class SecurityManager: - def __init__(self, user_stores: List[BaseStore]): - self.user_stores: List[BaseStore] = user_stores - 
self._additional_jwt_claims_loader: List[Callable[[User], Dict]] = [] + def __init__(self, user_stores: list[BaseStore]): + self.user_stores: list[BaseStore] = user_stores + self._additional_jwt_claims_loader: list[Callable[[User], dict]] = [] - def login(self, username, extra_fields=None) -> Optional[User]: + def login(self, username, extra_fields=None) -> User | None: return self._delegate_stores_until_not_none("login", username, extra_fields or {}) def logout(self): @@ -117,17 +118,15 @@ def _delegate_stores_until_not_none(self, store_method_name: str, *args): return value @property - def current_user(self) -> Optional[User]: + def current_user(self) -> User | None: try: r = get_request() if r: # Fetch the existing user from the request if there is any - try: + with contextlib.suppress(KeyError, AttributeError): user = r.state.user if user: return user - except (KeyError, AttributeError): - pass # If there is no user, try to load it from the request and store it in the request user = r.state.user = self.load_from_request(r) return user @@ -166,17 +165,15 @@ def load_from_request(self, request: Request): headers={"WWW-Authenticate": "Bearer"}, ) - def _load_from_key(self, request: Request) -> Optional[User]: + def _load_from_key(self, request: Request) -> User | None: # try to login using the api_key url arg api_key = request.headers.get("apiKey") if not api_key: # then, try to login using Basic Auth api_key = request.headers.get("Authorization") if api_key: - try: + with contextlib.suppress(Exception): api_key = b64decode(api_key.replace("Basic ", "", 1)).decode("utf-8") - except Exception: - pass if api_key: return self._delegate_stores_until_not_none("load_from_key", api_key) @@ -196,7 +193,8 @@ def init_app(self, app: FastAPI): @app.middleware("http") async def refresh_token_middleware(request: Request, call_next): response = await call_next(request) - try: + # Case where there is not a valid JWT. 
Just return the original response + with contextlib.suppress(RuntimeError, KeyError, AttributeError): # Use the cached user from the request, to only refresh a token if the user was actually requested. This avoids calling load_from_request for every request. user = request.state.user if user and user.access_token: @@ -207,13 +205,9 @@ async def refresh_token_middleware(request: Request, call_next): if target_timestamp > exp_timestamp: access_token, payload = user_to_access_token(user) add_access_token_to_response(response, access_token) - except (RuntimeError, KeyError, AttributeError): - # Case where there is not a valid JWT. Just return the original respone - pass - finally: - return response + return response - def jwt_claims_loader(self, callback: Callable[[User], Dict]): + def jwt_claims_loader(self, callback: Callable[[User], dict]): """ Register additional jwt claims loaders. These will be called with the current user when a new token is issued. diff --git a/tdp_core/security/model.py b/tdp_core/security/model.py index 79349dd62..f093ab8b6 100644 --- a/tdp_core/security/model.py +++ b/tdp_core/security/model.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Optional +from typing import Any from pydantic import BaseModel @@ -11,14 +11,14 @@ class Token(BaseModel): class LogoutReturnValue(BaseModel): - data: Optional[Dict[Any, Any]] = {} - cookies: Optional[List[Dict[Any, Any]]] = [] + data: dict[Any, Any] | None = {} + cookies: list[dict[Any, Any]] | None = [] class User(BaseModel): id: str - roles: List[str] = [] - access_token: Optional[str] = None + roles: list[str] = [] + access_token: str | None = None @property def name(self): diff --git a/tdp_core/security/permissions.py b/tdp_core/security/permissions.py index 25e553904..114ac5a33 100644 --- a/tdp_core/security/permissions.py +++ b/tdp_core/security/permissions.py @@ -1,5 +1,3 @@ -from typing import Optional - from .manager import current_user from .model import ANONYMOUS_USER, User @@ -62,13
+60,10 @@ def _is_equal(a, b): def _includes(items, item): if not item: return False - for check in items: - if _is_equal(check, item): - return True - return False + return any(_is_equal(check, item) for check in items) -def can(item, permission: int, user: Optional[User] = None): +def can(item, permission: int, user: User | None = None): if user is None: user = current_user() @@ -98,13 +93,13 @@ def can(item, permission: int, user: Optional[User] = None): return permission in others -def can_read(data_description, user: Optional[User] = None): +def can_read(data_description, user: User | None = None): return can(data_description, PERMISSION_READ, user) -def can_write(data_description, user: Optional[User] = None): +def can_write(data_description, user: User | None = None): return can(data_description, PERMISSION_WRITE, user) -def can_execute(data_description, user: Optional[User] = None): +def can_execute(data_description, user: User | None = None): return can(data_description, PERMISSION_EXECUTE, user) diff --git a/tdp_core/security/store/alb_security_store.py b/tdp_core/security/store/alb_security_store.py index 449ee0718..c58e4ae8a 100644 --- a/tdp_core/security/store/alb_security_store.py +++ b/tdp_core/security/store/alb_security_store.py @@ -1,5 +1,4 @@ import logging -from typing import Optional import jwt @@ -11,9 +10,9 @@ class ALBSecurityStore(BaseStore): - def __init__(self, cookie_name: Optional[str], signout_url: Optional[str]): + def __init__(self, cookie_name: str | None, signout_url: str | None): self.cookie_name = cookie_name - self.signout_url: Optional[str] = signout_url + self.signout_url: str | None = signout_url def load_from_request(self, req): if "X-Amzn-Oidc-Identity" in req.headers and "X-Amzn-Oidc-Accesstoken" in req.headers and "X-Amzn-Oidc-Data" in req.headers: diff --git a/tdp_core/security/store/base_store.py b/tdp_core/security/store/base_store.py index 8aad5e4f9..98599494c 100644 --- a/tdp_core/security/store/base_store.py +++ 
b/tdp_core/security/store/base_store.py @@ -1,26 +1,25 @@ -from abc import ABC -from typing import Optional - from fastapi import FastAPI, Request from ..model import LogoutReturnValue, User -class BaseStore(ABC): +class BaseStore: def __init__(self): - pass + return None def init_app(self, app: FastAPI): - pass + return None - def load_from_request(self, request: Request) -> Optional[User]: + def load_from_request(self, request: Request) -> User | None: return None - def load_from_key(self, key: str) -> Optional[User]: + def load_from_key(self, key: str) -> User | None: return None - def login(self, username: str, extra_fields={}) -> Optional[User]: + def login(self, username: str, extra_fields=None) -> User | None: + if extra_fields is None: + extra_fields = {} return None - def logout(self, user: User) -> Optional[LogoutReturnValue]: - pass + def logout(self, user: User) -> LogoutReturnValue | None: + return None diff --git a/tdp_core/security/store/dummy_store.py b/tdp_core/security/store/dummy_store.py index 808e5c04b..dda08b79a 100644 --- a/tdp_core/security/store/dummy_store.py +++ b/tdp_core/security/store/dummy_store.py @@ -42,7 +42,9 @@ def load_from_key(self, api_key: str): None, ) - def login(self, username, extra_fields={}): + def login(self, username, extra_fields=None): + if extra_fields is None: + extra_fields = {} return next( (u for u in self._users if u.id == username and u.is_password(extra_fields["password"])), None, diff --git a/tdp_core/security/store/no_security_store.py b/tdp_core/security/store/no_security_store.py index 612342e8f..cfa905b92 100644 --- a/tdp_core/security/store/no_security_store.py +++ b/tdp_core/security/store/no_security_store.py @@ -1,5 +1,4 @@ import logging -from typing import List from ... 
import manager from ..model import User @@ -9,7 +8,7 @@ class NoSecurityStore(BaseStore): - def __init__(self, user: str, roles: List[str]): + def __init__(self, user: str, roles: list[str]): self.user = user self.roles = roles diff --git a/tdp_core/server/cmd.py b/tdp_core/server/cmd.py index 749c5419e..635bc8a36 100644 --- a/tdp_core/server/cmd.py +++ b/tdp_core/server/cmd.py @@ -1,13 +1,13 @@ import logging import shlex -from typing import Callable, Optional, Union +from typing import Callable from .. import manager _log = logging.getLogger(__name__) -def parse_command_string(cmd: Optional[str]) -> Union[Callable, None]: +def parse_command_string(cmd: str | None) -> Callable | None: """ Parses an application command. Example using cmd entrypoint: diff --git a/tdp_core/server/mainapp.py b/tdp_core/server/mainapp.py index a62d50242..6f62ee7e1 100644 --- a/tdp_core/server/mainapp.py +++ b/tdp_core/server/mainapp.py @@ -1,7 +1,6 @@ import logging import os import re -from builtins import next from flask import Flask, send_from_directory from werkzeug.security import safe_join @@ -120,7 +119,7 @@ def build_info(): dependencies = [] all_plugins = [] - build_info = dict(plugins=all_plugins, dependencies=dependencies) + build_info = {"plugins": all_plugins, "dependencies": dependencies} requirements = "requirements.txt" if os.path.exists(requirements): @@ -133,7 +132,7 @@ def build_info(): build_info["version"] = p.version # type: ignore build_info["resolved"] = p.resolved # type: ignore else: - desc = dict(name=p.name, version=p.version, resolved=p.resolved) + desc = {"name": p.name, "version": p.version, "resolved": p.resolved} all_plugins.append(desc) return build_info diff --git a/tdp_core/server/utils.py b/tdp_core/server/utils.py index 04073c59b..4b40a34e5 100644 --- a/tdp_core/server/utils.py +++ b/tdp_core/server/utils.py @@ -2,7 +2,6 @@ import logging import time import traceback -from typing import Optional from flask import Flask, jsonify from 
werkzeug.exceptions import HTTPException @@ -65,7 +64,7 @@ def load_after_server_started_hooks(): _log.info("Elapsed time for server startup hooks: %d seconds", time.time() - start) -def detail_from_exception(e: Exception) -> Optional[str]: +def detail_from_exception(e: Exception) -> str | None: """Returns the full stacktrace in development mode and just the error message in production mode.""" # Always return full stacktrace in development mode if manager.settings.is_development_mode: diff --git a/tdp_core/server/visyn_server.py b/tdp_core/server/visyn_server.py index 0b3fef54a..f04aa8c01 100644 --- a/tdp_core/server/visyn_server.py +++ b/tdp_core/server/visyn_server.py @@ -2,7 +2,7 @@ import logging.config import sys import threading -from typing import Any, Dict, Optional +from typing import Any import anyio from fastapi import FastAPI @@ -18,7 +18,7 @@ def create_visyn_server( - *, fast_api_args: Dict[str, Any] = {}, start_cmd: Optional[str] = None, workspace_config: Optional[Dict] = None + *, fast_api_args: dict[str, Any] | None = None, start_cmd: str | None = None, workspace_config: dict | None = None ) -> FastAPI: """ Create a new FastAPI instance while ensuring that the configuration and plugins are loaded, extension points are registered, database migrations are executed, ... @@ -28,6 +28,8 @@ def create_visyn_server( start_cmd: Optional start command for the server, i.e. db-migration exposes commands like `db-migration exec <..> upgrade head`. workspace_config: Optional override for the workspace configuration. If nothing is provided `load_workspace_config()` is used instead. """ + if fast_api_args is None: + fast_api_args = {} from .. 
import manager from ..settings.model import GlobalSettings from ..settings.utils import load_workspace_config diff --git a/tdp_core/settings/model.py b/tdp_core/settings/model.py index ccf104743..0c6f725da 100644 --- a/tdp_core/settings/model.py +++ b/tdp_core/settings/model.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Literal, Optional +from typing import Any, Literal from pydantic import BaseModel, BaseSettings, Extra, Field @@ -23,20 +23,20 @@ class MongoSettings(BaseModel): class DisableSettings(BaseModel): - plugins: List[str] = [] - extensions: List[str] = [] + plugins: list[str] = [] + extensions: list[str] = [] class AlbSecurityStoreSettings(BaseModel): enable: bool = False - cookie_name: Optional[str] = None - signout_url: Optional[str] = None + cookie_name: str | None = None + signout_url: str | None = None class NoSecurityStoreSettings(BaseModel): enable: bool = False user: str = "admin" - roles: List[str] = [] + roles: list[str] = [] class SecurityStoreSettings(BaseModel): @@ -57,16 +57,16 @@ class TDPCoreSettings(BaseModel): """ disable: DisableSettings = DisableSettings() - enabled_plugins: List[str] = [] + enabled_plugins: list[str] = [] # TODO: Proper typing. This is 1:1 passed to the logging.config.dictConfig(...). 
- logging: Dict = Field(default_logging_dict) + logging: dict = Field(default_logging_dict) # tdp_core migrations: DBMigrationSettings = DBMigrationSettings() # phovea_security_flask - users: List[Dict[str, Any]] = Field( + users: list[dict[str, Any]] = Field( [ { "name": "admin", @@ -97,7 +97,7 @@ class GlobalSettings(BaseSettings): secret_key: str = "VERY_SECRET_STUFF_T0IB84wlQrdMH8RVT28w" # JWT options mostly inspired by flask-jwt-extended: https://flask-jwt-extended.readthedocs.io/en/stable/options/#general-options - jwt_token_location: List[str] = ["headers", "cookies"] + jwt_token_location: list[str] = ["headers", "cookies"] jwt_expire_in_seconds: int = 24 * 60 * 60 jwt_refresh_if_expiring_in_seconds: int = 30 * 60 jwt_algorithm: str = "HS256" @@ -105,7 +105,7 @@ class GlobalSettings(BaseSettings): jwt_header_name: str = "Authorization" jwt_header_type: str = "Bearer" jwt_cookie_secure: bool = False - jwt_cookie_samesite: Optional[Literal["lax", "strict", "none"]] = "strict" + jwt_cookie_samesite: Literal["lax", "strict", "none"] | None = "strict" jwt_access_cookie_path: str = "/" # General settings for tdp_core @@ -115,7 +115,7 @@ class GlobalSettings(BaseSettings): def is_development_mode(self) -> bool: return self.env.startswith("dev") - def get_nested(self, key: str, default: Any = None) -> Optional[Any]: + def get_nested(self, key: str, default: Any = None) -> Any | None: """ Retrieves the value at the position of the key from the dict-ified settings, or `default` if `None` is found. This method is for legacy purposes only, you should in most cases just use the settings directly. 
diff --git a/tdp_core/settings/utils.py b/tdp_core/settings/utils.py index c8b111510..786bee19d 100644 --- a/tdp_core/settings/utils.py +++ b/tdp_core/settings/utils.py @@ -1,14 +1,14 @@ import codecs import logging import os -from typing import Any, Dict +from typing import Any import jsoncfg _log = logging.getLogger(__name__) -def load_workspace_config() -> Dict[str, Any]: +def load_workspace_config() -> dict[str, Any]: """ Loads the global config.json placed at `PHOVEA_CONFIG_PATH` (defaults to `config.json`). """ @@ -22,7 +22,7 @@ def load_workspace_config() -> Dict[str, Any]: return {} -def load_config_file(path: str) -> Dict[str, Any]: +def load_config_file(path: str) -> dict[str, Any]: """ Opens any `*.json` file and loads it via `jsoncfg.loads`. """ diff --git a/tdp_core/sql.py b/tdp_core/sql.py index 04d68462a..7b86f4d14 100644 --- a/tdp_core/sql.py +++ b/tdp_core/sql.py @@ -94,10 +94,7 @@ def get_score_data(database, view_name): data_idtype = view.idtype target_idtype = request.values.get("target", data_idtype) - if data_idtype != target_idtype: - mapped_scores = map_scores(r, data_idtype, target_idtype) - else: - mapped_scores = r + mapped_scores = map_scores(r, data_idtype, target_idtype) if data_idtype != target_idtype else r return format(mapped_scores) @@ -148,7 +145,7 @@ def lookup(database, view_name): r_items, more, view = db.lookup(database, view_name, query, page, limit, request.values) - return jsonify(dict(items=r_items, more=more)) + return jsonify({"items": r_items, "more": more}) def create(): diff --git a/tdp_core/sql_filter.py b/tdp_core/sql_filter.py index 244b7c8c4..dd67453fb 100644 --- a/tdp_core/sql_filter.py +++ b/tdp_core/sql_filter.py @@ -38,7 +38,7 @@ def filter_logic(view, args): :return: """ processed_args = MultiDict() - extra_args = dict() + extra_args = {} where_clause = {} for k, v in list(args.lists()): if k.endswith("[]"): @@ -174,14 +174,14 @@ def to_clause(k, v): else: where_group_clauses[group].append((clause, join)) - 
replacements = dict() + replacements = {} replacements["and_where"] = (" AND " + " AND ".join(c for c, _ in where_default_clause)) if where_default_clause else "" replacements["where"] = (" WHERE " + " AND ".join(c for c, _ in where_default_clause)) if where_default_clause else "" # unique joins - replacements["joins"] = " ".join(set(j for _, j in where_default_clause if j is not None)) + replacements["joins"] = " ".join({j for _, j in where_default_clause if j is not None}) for group, v in list(where_group_clauses.items()): replacements["and_" + group + "_where"] = (" AND " + " AND ".join(c for c, _ in v)) if v else "" replacements[group + "_where"] = (" WHERE " + " AND ".join(c for c, _ in v)) if v else "" - replacements[group + "_joins"] = " ".join(set(j for _, j in v if j is not None)) + replacements[group + "_joins"] = " ".join({j for _, j in v if j is not None}) return replacements, processed_args, extra_args, where_clause diff --git a/tdp_core/storage.py b/tdp_core/storage.py index dd34758ff..e7f00f0f8 100644 --- a/tdp_core/storage.py +++ b/tdp_core/storage.py @@ -20,8 +20,8 @@ def list_namedset(): db = MongoClient(c.host, c.port)[c.db_namedsets] if request.method == "GET": - q = dict(idType=request.args["idType"]) if "idType" in request.args else {} - return jsonify(list((d for d in db.namedsets.find(q, {"_id": 0}) if security.can_read(d)))) + q = {"idType": request.args["idType"]} if "idType" in request.args else {} + return jsonify(([d for d in db.namedsets.find(q, {"_id": 0}) if security.can_read(d)])) if request.method == "POST": id = _generate_id() @@ -34,18 +34,18 @@ def list_namedset(): sub_type_key = request.values.get("subTypeKey", "") sub_type_value = request.values.get("subTypeValue", "") type = int(request.values.get("type", "0")) - entry = dict( - id=id, - name=name, - creator=creator, - permissions=permissions, - ids=ids, - idType=id_type, - description=description, - subTypeKey=sub_type_key, - subTypeValue=sub_type_value, - type=type, - ) + 
entry = { + "id": id, + "name": name, + "creator": creator, + "permissions": permissions, + "ids": ids, + "idType": id_type, + "description": description, + "subTypeKey": sub_type_key, + "subTypeValue": sub_type_value, + "type": type, + } db.namedsets.insert_one(entry) del entry["_id"] return jsonify(entry) @@ -54,7 +54,7 @@ def list_namedset(): @app.route("/namedset/", methods=["GET", "DELETE", "PUT"]) # type: ignore def get_namedset(namedset_id): db = MongoClient(c.host, c.port)[c.db_namedsets] - result = list(db.namedsets.find(dict(id=namedset_id), {"_id": 0})) + result = list(db.namedsets.find({"id": namedset_id}, {"_id": 0})) entry = result[0] if len(result) > 0 else None if not entry: @@ -68,15 +68,15 @@ def get_namedset(namedset_id): if request.method == "DELETE": if not security.can_write(entry): abort(403, 'Namedset with id "{}" is write protected'.format(namedset_id)) - q = dict(id=namedset_id) + q = {"id": namedset_id} result = db.namedsets.remove(q) return jsonify(result["n"]) # number of deleted documents if request.method == "PUT": if not security.can_write(entry): abort(403, 'Namedset with id "{}" is write protected'.format(namedset_id)) - filter = dict(id=namedset_id) - values = dict() + filter = {"id": namedset_id} + values = {} for key in ["name", "idType", "description", "subTypeKey", "subTypeValue"]: if key in request.form: values[key] = request.form[key] @@ -93,7 +93,7 @@ def get_namedset(namedset_id): def get_namedset_by_id(namedset_id): db = MongoClient(c.host, c.port)[c.db_namedsets] - q = dict(id=namedset_id) + q = {"id": namedset_id} result = list(db.namedsets.find(q, {"_id": 0})) if not result: abort(404, 'Namedset with id "{}" cannot be found'.format(namedset_id)) @@ -120,7 +120,7 @@ def post_attachment(): creator = security.current_username() permissions = security.DEFAULT_PERMISSION - entry = dict(id=id, creator=creator, permissions=permissions, data=request.data) + entry = {"id": id, "creator": creator, "permissions": permissions, 
"data": request.data} db.attachments.insert_one(entry) return id @@ -128,7 +128,7 @@ def post_attachment(): @app.route("/attachment/", methods=["GET", "DELETE", "PUT"]) # type: ignore def get_attachment(attachment_id): db = MongoClient(c.host, c.port)[c.db_namedsets] - result = list(db.attachments.find(dict(id=attachment_id), {"_id": 0})) + result = list(db.attachments.find({"id": attachment_id}, {"_id": 0})) entry = result[0] if len(result) > 0 else None if not entry: @@ -142,16 +142,16 @@ def get_attachment(attachment_id): if request.method == "DELETE": if not security.can_write(entry): abort(403, 'Attachment with id "{}" is write protected'.format(attachment_id)) - q = dict(id=attachment_id) + q = {"id": attachment_id} result = db.attachments.remove(q) return jsonify(result["n"]) # number of deleted documents if request.method == "PUT": if not security.can_write(entry): abort(403, 'Attachment with id "{}" is write protected'.format(attachment_id)) - filter = dict(id=attachment_id) + filter = {"id": attachment_id} # keep the encoded string - query = {"$set": dict(data=request.data)} + query = {"$set": {"data": request.data}} db.attachments.find_one_and_update(filter, query) return attachment_id diff --git a/tdp_core/swagger.py b/tdp_core/swagger.py index 7f067b738..a077298a2 100644 --- a/tdp_core/swagger.py +++ b/tdp_core/swagger.py @@ -15,7 +15,6 @@ def _gen(): - import io from os import path from yaml import safe_load @@ -27,7 +26,7 @@ def _gen(): base: dict[str, Any] = yaml_load(files) # type: ignore base["paths"] = OrderedDict(sorted(base["paths"].items(), key=lambda t: t[0])) - with io.open(path.join(here, "swagger", "view.tmpl.yml"), "r", encoding="utf-8") as f: + with open(path.join(here, "swagger", "view.tmpl.yml"), encoding="utf-8") as f: template = Template(str(f.read())) tags = base["tags"] @@ -46,7 +45,7 @@ def to_type(t): db.resolve(database) # trigger filling up columns # add database tag - tags.append(dict(name="db_" + database, 
description=connector.description or "")) + tags.append({"name": "db_" + database, "description": connector.description or ""}) for view, dbview in connector.views.items(): if not dbview.can_access() or dbview.query_type == "private": @@ -62,14 +61,14 @@ def to_type(t): for arg in dbview.arguments: info = dbview.get_argument_info(arg) args.append( - dict( - name=arg, - type=to_type(info.type), - as_list=info.as_list, - enum_values=None, - description=info.description, - example=info.example, - ) + { + "name": arg, + "type": to_type(info.type), + "as_list": info.as_list, + "enum_values": None, + "description": info.description, + "example": info.example, + } ) for arg in (a for a in dbview.replacements if a not in secure_replacements): @@ -78,23 +77,23 @@ def to_type(t): enum_values = None if isinstance(extra, list): enum_values = extra - if extra == int or extra == float: + if extra in (int, float): arg_type = to_type(extra) args.append( - dict( - name=arg, - type=arg_type, - as_list=False, - enum=enum_values, - description="", - ) + { + "name": arg, + "type": arg_type, + "as_list": False, + "enum": enum_values, + "description": "", + } ) filters = set() if "where" in dbview.replacements or "and_where" in dbview.replacements: # filter possible - for k in dbview.filters.keys(): + for k in dbview.filters: filters.add(k) if not filters: for k in list(dbview.columns.keys()): @@ -104,12 +103,12 @@ def to_type(t): # score query magic handling agg_score = connector.agg_score args.append( - dict( - name="agg", - type="string", - as_list=False, - enum=agg_score.valid_replacements.get("agg"), - ) + { + "name": "agg", + "type": "string", + "as_list": False, + "enum": agg_score.valid_replacements.get("agg"), + } ) props = [] @@ -122,9 +121,9 @@ def to_type(t): if dbview.idtype: # assume when id type given then we have ids - props.append(dict(name="_id", type="integer")) + props.append({"name": "_id", "type": "integer"}) if not any((p["name"] == "id" for p in props)): - 
props.append(dict(name="id", type="string")) + props.append({"name": "id", "type": "string"}) features = { "generic": dbview.query_type in ["generic", "helper", "table"], diff --git a/tdp_core/tests/fixtures/app.py b/tdp_core/tests/fixtures/app.py index a69b1ed77..500f8cbcf 100644 --- a/tdp_core/tests/fixtures/app.py +++ b/tdp_core/tests/fixtures/app.py @@ -9,24 +9,24 @@ from ...server.visyn_server import create_visyn_server -@pytest.fixture -def mock_plugins(monkeypatch): +@pytest.fixture() +def _mock_plugins(monkeypatch): def mock_current_user_in_manager(self): return permissions.User(id="admin") monkeypatch.setattr(SecurityManager, "current_user", property(mock_current_user_in_manager)) -@pytest.fixture -def app() -> Generator[FastAPI, Any, None]: - yield create_visyn_server( +@pytest.fixture() +def app() -> FastAPI: + return create_visyn_server( workspace_config={ "tdp_core": {"enabled_plugins": ["tdp_core"]}, } ) -@pytest.fixture +@pytest.fixture() def client(app: FastAPI) -> Generator[TestClient, Any, None]: with TestClient(app) as client: yield client diff --git a/tdp_core/tests/test_custom_encoders.py b/tdp_core/tests/test_custom_encoders.py index 2c983fb95..cf8bd32a7 100644 --- a/tdp_core/tests/test_custom_encoders.py +++ b/tdp_core/tests/test_custom_encoders.py @@ -11,9 +11,9 @@ def test_nan_values(app): # list that contains dictionary test_list_nested = [13, 5, 7, 12, test_dict, 22] # convert with to_json - test_result_simple = to_json(dict(myNum=test_var)) - test_result_list_simple = to_json(dict(myNum=test_list_simple)) - test_result_list_nested = to_json(dict(myNum=test_list_nested)) + test_result_simple = to_json({"myNum": test_var}) + test_result_list_simple = to_json({"myNum": test_list_simple}) + test_result_list_nested = to_json({"myNum": test_list_nested}) # make assertions assert test_result_simple == '{"myNum": null}' diff --git a/tdp_core/tests/test_mapper.py b/tdp_core/tests/test_mapper.py index 69003b239..f2b0dea52 100644 --- 
a/tdp_core/tests/test_mapper.py +++ b/tdp_core/tests/test_mapper.py @@ -19,7 +19,7 @@ def mapper(): ("ID6", "ID7", OneToMoreMappingTable("ID6", "ID7")), ] ) - yield mapper + return mapper def test_merge_2d_arrays(mapper): @@ -65,16 +65,16 @@ def test_merge_2d_arrays_invalid_length(mapper): def test_known_idtypes(mapper): - assert mapper.known_idtypes() == set(["ID1", "ID2", "ID3", "ID4", "ID5", "ID6", "ID7"]) + assert mapper.known_idtypes() == {"ID1", "ID2", "ID3", "ID4", "ID5", "ID6", "ID7"} def test_maps_to(mapper): - assert set(mapper.maps_to("ID1")) == set(["ID2", "ID3", "ID4"]) - assert set(mapper.maps_to("ID2")) == set(["ID1", "ID3", "ID4"]) - assert set(mapper.maps_to("ID3")) == set(["ID1", "ID2", "ID4"]) - assert set(mapper.maps_to("ID4")) == set(["ID1", "ID2", "ID3"]) - assert set(mapper.maps_to("ID5")) == set(["ID6", "ID7"]) - assert set(mapper.maps_to("ID6")) == set(["ID7"]) + assert set(mapper.maps_to("ID1")) == {"ID2", "ID3", "ID4"} + assert set(mapper.maps_to("ID2")) == {"ID1", "ID3", "ID4"} + assert set(mapper.maps_to("ID3")) == {"ID1", "ID2", "ID4"} + assert set(mapper.maps_to("ID4")) == {"ID1", "ID2", "ID3"} + assert set(mapper.maps_to("ID5")) == {"ID6", "ID7"} + assert set(mapper.maps_to("ID6")) == {"ID7"} def test_single_mapping(mapper): @@ -102,7 +102,7 @@ def test_transitive_merge_mapping(mapper): assert mapper("ID5", "ID7", [2, 4]) == [[2, 4, 6, 4, 8, 12, 6, 12, 18], [4, 8, 12, 8, 16, 24, 12, 24, 36]] -class OneToOneMappingTable(object): +class OneToOneMappingTable: def __init__(self, from_idtype, to_idtype): self.from_idtype = from_idtype self.to_idtype = to_idtype @@ -111,7 +111,7 @@ def __call__(self, ids): return [[id] for id in ids] -class OneToTwoMappingTable(object): +class OneToTwoMappingTable: def __init__(self, from_idtype, to_idtype): self.from_idtype = from_idtype self.to_idtype = to_idtype @@ -120,7 +120,7 @@ def __call__(self, ids): return [[id * 2] for id in ids] -class TwoToOneMappingTable(object): +class TwoToOneMappingTable: 
def __init__(self, from_idtype, to_idtype): self.from_idtype = from_idtype self.to_idtype = to_idtype @@ -129,7 +129,7 @@ def __call__(self, ids): return [[id / 2] for id in ids] -class OneToMoreMappingTable(object): +class OneToMoreMappingTable: def __init__(self, from_idtype, to_idtype): self.from_idtype = from_idtype self.to_idtype = to_idtype diff --git a/tdp_core/tests/test_rdkit_img.py b/tdp_core/tests/test_rdkit_img.py index 30a837195..13150f585 100644 --- a/tdp_core/tests/test_rdkit_img.py +++ b/tdp_core/tests/test_rdkit_img.py @@ -32,7 +32,7 @@ def test_invalid(client: TestClient, structure): assert res.status_code == 422 -@pytest.mark.parametrize("structure, expected", mol_expected.items()) +@pytest.mark.parametrize(("structure", "expected"), mol_expected.items()) def test_valid(client: TestClient, structure, expected): res = client.get("/api/rdkit/", params={"structure": structure}) assert res.status_code == 200 @@ -57,7 +57,7 @@ def test_murcko(client: TestClient): hash_compare(res.content, "5ef9373dd8bcf049a3632968774345527bab7ba757da1eaab943bccfe2ce7e32") -@pytest.mark.parametrize("mol, ref, expected", similarity_data) +@pytest.mark.parametrize(("mol", "ref", "expected"), similarity_data) def test_similarity(client: TestClient, mol, ref, expected): res = client.get("/api/rdkit/similarity/", params={"structure": mol, "reference": ref}) assert res.status_code == 200 @@ -76,11 +76,11 @@ def test_maximum_common_substructure(client: TestClient): def test_maximum_common_substructure_inconsistent(client: TestClient): """This method sometimes returns None -> 500 and sometimes a questionmark""" res = client.post("/api/rdkit/mcs/", json=["C1COCCO1", "CC(COC)OC", "CC1(OCCO1)C", "CCCCCCCO", "CCCCCCO"]) - print(res.content) if res.status_code == 200: hash_compare(res.content, "73e4c61270b280938b647dbad15552167f8cef259f5fc0c6f30a291c787d3b31") else: - assert res.status_code == 204 and res.content == b"null" + assert res.status_code == 204 + assert res.content == 
b"null" def test_substructures(client: TestClient): diff --git a/tdp_core/tests/test_security_login.py b/tdp_core/tests/test_security_login.py index bcebd4618..509ea1e38 100644 --- a/tdp_core/tests/test_security_login.py +++ b/tdp_core/tests/test_security_login.py @@ -1,5 +1,3 @@ -from typing import Dict - from fastapi.testclient import TestClient from tdp_core import manager @@ -36,7 +34,7 @@ def claims_loader_2(user: User): # Login with the dummy user response = client.post("/login", data={"username": "admin", "password": "admin"}) assert response.status_code == 200 - user: Dict = response.json() + user: dict = response.json() assert user["name"] == "admin" assert user["roles"] == ["admin"] assert user["payload"]["hello"] == "world" diff --git a/tdp_core/utils.py b/tdp_core/utils.py index cad2ff779..dab28d271 100644 --- a/tdp_core/utils.py +++ b/tdp_core/utils.py @@ -1,7 +1,5 @@ import json import logging -from builtins import range -from typing import Union from flask import abort, make_response from flask.wrappers import Response @@ -35,7 +33,7 @@ def map_scores(scores, from_idtype, to_idtype): mapped_ids = manager.id_mapping(from_idtype, to_idtype, [r["id"] for r in scores]) mapped_scores = [] - for score, mapped in zip(scores, mapped_ids): + for score, mapped in zip(scores, mapped_ids, strict=False): if not mapped: continue for target_id in mapped: @@ -108,7 +106,7 @@ def random_id(length): s = string.ascii_lowercase + string.digits id = "" - for i in range(0, length): + for _i in range(0, length): id += random.choice(s) return id @@ -119,7 +117,7 @@ class JSONExtensibleEncoder(json.JSONEncoder): """ def __init__(self, *args, **kwargs): - super(JSONExtensibleEncoder, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.encoders = [p.load().factory() for p in manager.registry.list("json-encoder")] @@ -127,7 +125,7 @@ def default(self, o): for encoder in self.encoders: if o in encoder: return encoder(o, self) - return 
super(JSONExtensibleEncoder, self).default(o) + return super().default(o) def to_json(obj, *args, **kwargs): @@ -145,12 +143,12 @@ def to_json(obj, *args, **kwargs): kwargs["ensure_ascii"] = False # Pandas JSON module has been deprecated and removed. UJson cannot convert numpy arrays, so it cannot be used here. The JSON used here does not support the `double_precision` keyword. - if isinstance(obj, float) or isinstance(obj, dict) or isinstance(obj, list): + if isinstance(obj, (float, dict, list)): obj = _handle_nan_values(obj) - return json.dumps(obj, cls=JSONExtensibleEncoder, *args, **kwargs) + return json.dumps(obj, *args, **kwargs, cls=JSONExtensibleEncoder) -def _handle_nan_values(obj_to_convert: Union[dict, list, float]) -> Union[dict, list, None]: +def _handle_nan_values(obj_to_convert: dict | list | float) -> dict | list | None: """ Convert any NaN values in the given object to None. Previously, Pandas was used to encode NaN to null. This feature has been deprecated and removed, therefore the standard JSON encoder is used which parses NaN instead of null. 
A custom JSON encoder does not work for converting these values to None because python's @@ -169,7 +167,7 @@ def _handle_nan_values(obj_to_convert: Union[dict, list, float]) -> Union[dict, if isinstance(obj_to_convert, dict): for k, v in obj_to_convert.items(): # value is dictionary or list - if isinstance(v, dict) or isinstance(v, list): + if isinstance(v, (dict, list)): converted_dict[k] = _handle_nan_values(v) else: # value is NaN diff --git a/tdp_core/xlsx.py b/tdp_core/xlsx.py index 1abef7b27..b7a63bb63 100644 --- a/tdp_core/xlsx.py +++ b/tdp_core/xlsx.py @@ -13,7 +13,7 @@ app = Flask(__name__) -_types = dict(b="boolean", s="string") +_types = {"b": "boolean", "s": "string"} def to_type(cell): @@ -24,7 +24,7 @@ def to_type(cell): if cell.data_type in _types: return _types[cell.data_type] v = cell.value - if isinstance(v, int) or isinstance(v, int): + if isinstance(v, (int, int)): return "int" if isinstance(v, float): return "float" @@ -48,7 +48,7 @@ def _xlsx2json(): def convert_row(row, cols): result = {} - for r, c in zip(cols, row): + for r, c in zip(cols, row, strict=False): result[c["name"]] = _convert_value(r.value) return result @@ -59,16 +59,16 @@ def convert_sheet(ws): ws_cols = next(ws_rows, []) ws_first_row = next(ws_rows, []) - cols = [dict(name=h.value, type=to_type(r)) for h, r in zip(ws_cols, ws_first_row)] + cols = [{"name": h.value, "type": to_type(r)} for h, r in zip(ws_cols, ws_first_row, strict=False)] rows = [] rows.append(convert_row(cols, ws_first_row)) for row in ws_rows: rows.append(str(convert_row(cols, row))) - return dict(title=ws.title, columns=cols, rows=rows) + return {"title": ws.title, "columns": cols, "rows": rows} - data = dict(sheets=[convert_sheet(ws) for ws in wb.worksheets]) + data = {"sheets": [convert_sheet(ws) for ws in wb.worksheets]} return jsonify(data)