14 Commits

Author SHA1 Message Date
8cdf91c8fb [Fix] Broken Model Creation (#356)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#356
2026-03-12 11:34:14 +13:00
bafbf11322 [Fix] Broken Enzyme Links (#353)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#353
2026-03-12 10:25:47 +13:00
f1a9456d1d [Fix] enviFormer prediction (#352)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#352
2026-03-12 08:49:44 +13:00
e0764126e3 [Fix] Scenario Review Status + Depth issues (#351)
https://envipath.org/api/legacy/package/32de3cf4-e3e6-4168-956e-32fa5ddb0ce1/pathway/1d537657-298c-496b-9e6f-2bec0cbe0678

-> Node.depth can be float for Dummynodes
-> Scenarios in Edge.d3_json were lacking a reviewed flag

Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#351
2026-03-12 08:28:20 +13:00
ef0c45b203 [Fix] Pepper display probability calculation (#349)
Probability of persistent is now calculated to include very persistent.

Reviewed-on: enviPath/enviPy#349
Co-authored-by: Liam Brydon <lbry121@aucklanduni.ac.nz>
Co-committed-by: Liam Brydon <lbry121@aucklanduni.ac.nz>
2026-03-11 19:12:55 +13:00
b737fc93eb [Feature] Search for Permissions, Prep Compound / Structure to be extended, Prep Template overwrites (#347)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#347
2026-03-11 11:27:15 +13:00
d4295c9349 [Fix] bootstrap command now reflects new Scenario/AdditionalInformation structure (#346)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#346
2026-03-07 03:14:28 +13:00
c6ff97694d [Feature] PEPPER in enviPath (#332)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#332
2026-03-06 22:11:22 +13:00
6e00926371 [Feature] Scenario and Additional Information creation via enviPath-python, Add Half Lifes to API Output, Fix source/target ids in legacy API (#340)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#340
2026-03-06 07:20:18 +13:00
81cc612e69 [Feature] Populate Batch Predict Table by CSV (#339)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#339
2026-03-06 03:15:44 +13:00
cc9598775c [Fix] Fix Perm for creating entities (#341)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#341
2026-02-27 03:56:33 +13:00
d2c2e643cb [Fix] Compound Grouping, Identity prediction of enviFormer, Setting params (#337)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#337
2026-02-20 10:14:28 +13:00
0ff046363c [Fix] Fixed failing frontend tests due to renaming (#335)
Co-authored-by: Tim Lorsbach <tim@lorsba.ch>
Reviewed-on: enviPath/enviPy#335
2026-02-17 03:09:32 +13:00
5150027f0d [Fix] Login via email, prevent Usernames with certain chars 2026-02-16 13:58:06 +01:00
58 changed files with 5234 additions and 590 deletions

View File

@ -8,7 +8,7 @@ repos:
- id: end-of-file-fixer - id: end-of-file-fixer
- id: check-yaml - id: check-yaml
- id: check-added-large-files - id: check-added-large-files
exclude: ^static/images/ exclude: ^static/images/|fixtures/
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.13.3 rev: v0.13.3

0
bridge/__init__.py Normal file
View File

233
bridge/contracts.py Normal file
View File

@ -0,0 +1,233 @@
import enum
from abc import ABC, abstractmethod
from .dto import BuildResult, EnviPyDTO, EvaluationResult, RunResult
class PropertyType(enum.Enum):
    """Weight classification for property plugins.

    Distinguishes cheap-to-compute ("lightweight") properties from
    expensive ("heavy") ones, e.g. when deciding how to schedule or
    present a plugin.
    """

    LIGHTWEIGHT = "lightweight"
    HEAVY = "heavy"
class Plugin(ABC):
    """Abstract base class that every plugin implementation derives from.

    Enforces a common contract: each concrete plugin must expose an
    identifier, a name (unique across all plugins), and a display string
    for user interfaces.
    """

    @abstractmethod
    def identifier(self) -> str:
        """Return the identifier of this plugin."""
        pass

    @abstractmethod
    def name(self) -> str:
        """Return the plugin's name.

        The name must be unique across all plugins.
        """
        pass

    @abstractmethod
    def display(self) -> str:
        """Return the string used to represent this plugin in dropdown
        menus and other user interfaces."""
        pass
class Property(Plugin):
    """Plugin contract for property prediction.

    Concrete implementations declare their package requirements and their
    weight class, and provide build / run / evaluate entry points that
    operate on an :class:`EnviPyDTO`.
    """

    @abstractmethod
    def requires_rule_packages(self) -> bool:
        """Return True if this property depends on rule packages for its
        functioning, False otherwise. Must be implemented by subclasses."""
        pass

    @abstractmethod
    def requires_data_packages(self) -> bool:
        """Return True if this property depends on data packages for its
        functioning, False otherwise. Must be implemented by subclasses."""
        pass

    @abstractmethod
    def get_type(self) -> PropertyType:
        """Return the :class:`PropertyType` of this property.

        Must be implemented by subclasses to declare the weight
        classification of the implementation.
        """
        pass

    def is_heavy(self):
        """Return True if :meth:`get_type` reports ``PropertyType.HEAVY``.

        Returns:
            bool: True for heavy properties, otherwise False.
        """
        # Enum members are singletons, so an identity check is equivalent
        # to the equality comparison.
        return self.get_type() is PropertyType.HEAVY

    @abstractmethod
    def build(self, eP: EnviPyDTO, *args, **kwargs) -> BuildResult | None:
        """Build this property for the given environment.

        Parameters:
            eP: Data transfer object carrying the environment details
                required for the build.
            *args: Additional positional arguments for the build.
            **kwargs: Additional keyword arguments for the build.

        Returns:
            A :class:`BuildResult` if the build succeeds, else None.
        """
        pass

    @abstractmethod
    def run(self, eP: EnviPyDTO, *args, **kwargs) -> RunResult:
        """Execute the property prediction for the given environment.

        Parameters:
            eP: Data transfer object with the information required for
                processing. Mandatory.
            *args: Additional positional arguments.
            **kwargs: Additional keyword options or settings.

        Returns:
            A :class:`RunResult` encapsulating the outcome of the run.
        """
        pass

    @abstractmethod
    def evaluate(self, eP: EnviPyDTO, *args, **kwargs) -> EvaluationResult:
        """Evaluate this property against the given environment.

        Parameters:
            eP: Data transfer object containing the input for evaluation.
            *args: Additional positional arguments for the evaluation.
            **kwargs: Additional keyword arguments for the evaluation.

        Returns:
            The :class:`EvaluationResult` of the evaluation.
        """
        pass

    @abstractmethod
    def build_and_evaluate(self, eP: EnviPyDTO, *args, **kwargs) -> EvaluationResult:
        """Build this property and immediately evaluate it.

        Parameters:
            eP: Data transfer object required for building and evaluating.
            *args: Additional positional arguments.
            **kwargs: Additional keyword arguments.

        Returns:
            The :class:`EvaluationResult` of the combined run.
        """
        pass

140
bridge/dto.py Normal file
View File

@ -0,0 +1,140 @@
from dataclasses import dataclass
from typing import Any, List, Optional, Protocol
from envipy_additional_information import EnviPyModel, register
from pydantic import HttpUrl
from utilities.chem import FormatConverter, ProductSet
@dataclass(frozen=True, slots=True)
class Context:
    """Immutable value object bundling an entity's identity with a working directory."""

    uuid: str  # identifier of the entity being processed
    url: str  # URL of that entity
    work_dir: str  # scratch directory available for the run
class CompoundProto(Protocol):
    """Structural (duck-typed) interface for compound-like objects."""

    url: str | None
    name: str | None
    smiles: str  # SMILES string of the compound
class RuleProto(Protocol):
    """Structural interface for rule-like objects that can be applied to a SMILES."""

    url: str
    name: str

    def apply(self, smiles, *args, **kwargs): ...
class ReactionProto(Protocol):
    """Structural interface for reaction-like objects and their associated rules."""

    url: str
    name: str
    rules: List[RuleProto]
class EnviPyDTO(Protocol):
    """Structural interface of the data transfer object handed to plugins.

    Exposes the execution context plus the compounds, reactions and rules
    a plugin may work with, along with two static chemistry helpers.
    """

    def get_context(self) -> Context: ...

    def get_compounds(self) -> List[CompoundProto]: ...

    def get_reactions(self) -> List[ReactionProto]: ...

    def get_rules(self) -> List[RuleProto]: ...

    # NOTE(review): signature mirrors FormatConverter.standardize (see BaseDTO);
    # confirm semantics against utilities.chem.
    @staticmethod
    def standardize(smiles, remove_stereo=False, canonicalize_tautomers=False): ...

    # NOTE(review): signature mirrors FormatConverter.apply (see BaseDTO);
    # confirm parameter semantics against utilities.chem.
    @staticmethod
    def apply(
        smiles: str,
        smirks: str,
        preprocess_smiles: bool = True,
        bracketize: bool = True,
        standardize: bool = True,
        kekulize: bool = True,
        remove_stereo: bool = True,
        reactant_filter_smarts: str | None = None,
        product_filter_smarts: str | None = None,
    ) -> List["ProductSet"]: ...
class PredictedProperty(EnviPyModel):
    """Marker base class for property payloads produced by plugins."""

    pass
@register("buildresult")
class BuildResult(EnviPyModel):
    """Result payload of a plugin build step."""

    # JSON-like payload; None when the build produced no data.
    data: dict[str, Any] | List[dict[str, Any]] | None
@register("runresult")
class RunResult(EnviPyModel):
    """Result payload of a plugin run/prediction step."""

    producer: HttpUrl  # URL identifying what produced this result
    description: Optional[str] = None
    result: PredictedProperty | List[PredictedProperty]
@register("evaluationresult")
class EvaluationResult(EnviPyModel):
    """Result payload of a plugin evaluation step."""

    # JSON-like payload; None when the evaluation produced no data.
    data: dict[str, Any] | List[dict[str, Any]] | None
class BaseDTO(EnviPyDTO):
    """Concrete :class:`EnviPyDTO` backed by plain instance attributes.

    Bundles the execution context with the compounds, reactions and rules
    a plugin may access, and delegates the chemistry helpers to
    ``FormatConverter``.
    """

    def __init__(
        self,
        uuid: str,
        url: str,
        work_dir: str,
        compounds: List[CompoundProto],
        reactions: List[ReactionProto],
        rules: List[RuleProto],
    ):
        # Context fields.
        self.uuid = uuid
        self.url = url
        self.work_dir = work_dir
        # Entity collections exposed through the get_* accessors below.
        self.compounds = compounds
        self.reactions = reactions
        self.rules = rules

    def get_context(self) -> Context:
        """Return the execution context for this DTO."""
        return Context(self.uuid, self.url, self.work_dir)

    def get_compounds(self) -> List[CompoundProto]:
        """Return the compounds available to the plugin."""
        return self.compounds

    def get_reactions(self) -> List[ReactionProto]:
        """Return the reactions available to the plugin."""
        return self.reactions

    def get_rules(self) -> List[RuleProto]:
        """Return the rules available to the plugin."""
        return self.rules

    @staticmethod
    def standardize(smiles, remove_stereo=False, canonicalize_tautomers=False):
        """Standardize a SMILES string by delegating to FormatConverter."""
        return FormatConverter.standardize(
            smiles,
            remove_stereo=remove_stereo,
            canonicalize_tautomers=canonicalize_tautomers,
        )

    @staticmethod
    def apply(
        smiles: str,
        smirks: str,
        preprocess_smiles: bool = True,
        bracketize: bool = True,
        standardize: bool = True,
        kekulize: bool = True,
        remove_stereo: bool = True,
        reactant_filter_smarts: str | None = None,
        product_filter_smarts: str | None = None,
    ) -> List["ProductSet"]:
        """Apply a SMIRKS transformation to a SMILES via FormatConverter."""
        return FormatConverter.apply(
            smiles,
            smirks,
            preprocess_smiles,
            bracketize,
            standardize,
            kekulize,
            remove_stereo,
            reactant_filter_smarts,
            product_filter_smarts,
        )

View File

@ -14,7 +14,6 @@ import os
from pathlib import Path from pathlib import Path
from dotenv import load_dotenv from dotenv import load_dotenv
from envipy_plugins import Classifier, Property, Descriptor
from sklearn.ensemble import RandomForestClassifier from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier from sklearn.tree import DecisionTreeClassifier
@ -93,10 +92,19 @@ if os.environ.get("REGISTRATION_MANDATORY", False) == "True":
ROOT_URLCONF = "envipath.urls" ROOT_URLCONF = "envipath.urls"
TEMPLATE_DIRS = [
os.path.join(BASE_DIR, "templates"),
]
# If we have a non-public tenant, we might need to overwrite some templates
# search TENANT folder first...
if TENANT != "public":
TEMPLATE_DIRS.insert(0, os.path.join(BASE_DIR, TENANT, "templates"))
TEMPLATES = [ TEMPLATES = [
{ {
"BACKEND": "django.template.backends.django.DjangoTemplates", "BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": (os.path.join(BASE_DIR, "templates"),), "DIRS": TEMPLATE_DIRS,
"APP_DIRS": True, "APP_DIRS": True,
"OPTIONS": { "OPTIONS": {
"context_processors": [ "context_processors": [
@ -128,6 +136,13 @@ DATABASES = {
} }
} }
if os.environ.get("USE_TEMPLATE_DB", False) == "True":
DATABASES["default"]["TEST"] = {
"NAME": f"test_{os.environ['TEMPLATE_DB']}",
"TEMPLATE": os.environ["TEMPLATE_DB"],
}
# Password validation # Password validation
# https://docs.djangoproject.com/en/4.2/ref/settings/#auth-password-validators # https://docs.djangoproject.com/en/4.2/ref/settings/#auth-password-validators
@ -311,19 +326,16 @@ DEFAULT_MODEL_PARAMS = {
"num_chains": 10, "num_chains": 10,
} }
DEFAULT_MAX_NUMBER_OF_NODES = 30 DEFAULT_MAX_NUMBER_OF_NODES = 50
DEFAULT_MAX_DEPTH = 5 DEFAULT_MAX_DEPTH = 8
DEFAULT_MODEL_THRESHOLD = 0.25 DEFAULT_MODEL_THRESHOLD = 0.25
# Loading Plugins # Loading Plugins
PLUGINS_ENABLED = os.environ.get("PLUGINS_ENABLED", "False") == "True" PLUGINS_ENABLED = os.environ.get("PLUGINS_ENABLED", "False") == "True"
if PLUGINS_ENABLED: BASE_PLUGINS = [
from utilities.plugin import discover_plugins "pepper.PEPPER",
]
CLASSIFIER_PLUGINS = discover_plugins(_cls=Classifier)
PROPERTY_PLUGINS = discover_plugins(_cls=Property)
DESCRIPTOR_PLUGINS = discover_plugins(_cls=Descriptor)
else:
CLASSIFIER_PLUGINS = {} CLASSIFIER_PLUGINS = {}
PROPERTY_PLUGINS = {} PROPERTY_PLUGINS = {}
DESCRIPTOR_PLUGINS = {} DESCRIPTOR_PLUGINS = {}

View File

@ -49,7 +49,6 @@ class AdditionalInformationAPITests(TestCase):
description="Test scenario for additional information tests", description="Test scenario for additional information tests",
scenario_type="biodegradation", scenario_type="biodegradation",
scenario_date="2024-01-01", scenario_date="2024-01-01",
additional_information={}, # Initialize with empty dict
) )
cls.other_scenario = Scenario.objects.create( cls.other_scenario = Scenario.objects.create(
package=cls.other_package, package=cls.other_package,
@ -57,7 +56,6 @@ class AdditionalInformationAPITests(TestCase):
description="Scenario in package without access", description="Scenario in package without access",
scenario_type="biodegradation", scenario_type="biodegradation",
scenario_date="2024-01-01", scenario_date="2024-01-01",
additional_information={},
) )
def test_list_all_schemas(self): def test_list_all_schemas(self):

View File

@ -60,7 +60,7 @@ class ScenarioCreationAPITests(TestCase):
) )
self.assertEqual(response.status_code, 404) self.assertEqual(response.status_code, 404)
self.assertIn("Package not found", response.json()["detail"]) self.assertIn(f"Package with UUID {fake_uuid} not found", response.json()["detail"])
def test_create_scenario_insufficient_permissions(self): def test_create_scenario_insufficient_permissions(self):
"""Test that unauthorized access returns 403.""" """Test that unauthorized access returns 403."""

View File

@ -41,6 +41,24 @@ def get_package_for_read(user, package_uuid: UUID):
return package return package
def get_package_for_write(user, package_uuid: UUID):
"""
Get package by UUID with permission check.
"""
# FIXME: update package manager with custom exceptions to avoid manual checks here
try:
package = Package.objects.get(uuid=package_uuid)
except Package.DoesNotExist:
raise EPAPINotFoundError(f"Package with UUID {package_uuid} not found")
# FIXME: optimize package manager to exclusively work with UUIDs
if not user or user.is_anonymous or not PackageManager.writable(user, package):
raise EPAPIPermissionDeniedError("Insufficient permissions to access this package.")
return package
def get_scenario_for_read(user, scenario_uuid: UUID): def get_scenario_for_read(user, scenario_uuid: UUID):
"""Get scenario by UUID with read permission check.""" """Get scenario by UUID with read permission check."""
try: try:

View File

@ -9,6 +9,7 @@ from envipy_additional_information import registry
from envipy_additional_information.groups import GroupEnum from envipy_additional_information.groups import GroupEnum
from epapi.utils.schema_transformers import build_rjsf_output from epapi.utils.schema_transformers import build_rjsf_output
from epapi.utils.validation_errors import handle_validation_error from epapi.utils.validation_errors import handle_validation_error
from epdb.models import AdditionalInformation
from ..dal import get_scenario_for_read, get_scenario_for_write from ..dal import get_scenario_for_read, get_scenario_for_write
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -44,12 +45,14 @@ def list_scenario_info(request, scenario_uuid: UUID):
scenario = get_scenario_for_read(request.user, scenario_uuid) scenario = get_scenario_for_read(request.user, scenario_uuid)
result = [] result = []
for ai in scenario.get_additional_information():
for ai in AdditionalInformation.objects.filter(scenario=scenario):
result.append( result.append(
{ {
"type": ai.__class__.__name__, "type": ai.get().__class__.__name__,
"uuid": getattr(ai, "uuid", None), "uuid": getattr(ai, "uuid", None),
"data": ai.model_dump(mode="json"), "data": ai.data,
"attach_object": ai.content_object.simple_json() if ai.content_object else None,
} }
) )
return result return result
@ -85,20 +88,17 @@ def update_scenario_info(
scenario = get_scenario_for_write(request.user, scenario_uuid) scenario = get_scenario_for_write(request.user, scenario_uuid)
ai_uuid_str = str(ai_uuid) ai_uuid_str = str(ai_uuid)
# Find item to determine type for validation ai = AdditionalInformation.objects.filter(uuid=ai_uuid_str, scenario=scenario)
found_type = None
for type_name, items in scenario.additional_information.items():
if any(item.get("uuid") == ai_uuid_str for item in items):
found_type = type_name
break
if found_type is None: if not ai.exists():
raise HttpError(404, f"Additional information not found: {ai_uuid}") raise HttpError(404, f"Additional information with UUID {ai_uuid} not found")
ai = ai.first()
# Get the model class for validation # Get the model class for validation
cls = registry.get_model(found_type.lower()) cls = registry.get_model(ai.type.lower())
if not cls: if not cls:
raise HttpError(500, f"Unknown model type in data: {found_type}") raise HttpError(500, f"Unknown model type in data: {ai.type}")
# Validate the payload against the model # Validate the payload against the model
try: try:

View File

@ -9,15 +9,14 @@ import logging
import json import json
from epdb.models import Scenario from epdb.models import Scenario
from epdb.logic import PackageManager
from epdb.views import _anonymous_or_real from epdb.views import _anonymous_or_real
from ..pagination import EnhancedPageNumberPagination from ..pagination import EnhancedPageNumberPagination
from ..schemas import ( from ..schemas import (
ReviewStatusFilter,
ScenarioOutSchema, ScenarioOutSchema,
ScenarioCreateSchema, ScenarioCreateSchema,
ScenarioReviewStatusAndRelatedFilter,
) )
from ..dal import get_user_entities_for_read, get_package_entities_for_read from ..dal import get_user_entities_for_read, get_package_entities_for_read, get_package_for_write
from envipy_additional_information import registry from envipy_additional_information import registry
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -29,7 +28,7 @@ router = Router()
@paginate( @paginate(
EnhancedPageNumberPagination, EnhancedPageNumberPagination,
page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE, page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
filter_schema=ScenarioReviewStatusAndRelatedFilter, filter_schema=ReviewStatusFilter,
) )
def list_all_scenarios(request): def list_all_scenarios(request):
user = request.user user = request.user
@ -44,7 +43,7 @@ def list_all_scenarios(request):
@paginate( @paginate(
EnhancedPageNumberPagination, EnhancedPageNumberPagination,
page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE, page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
filter_schema=ScenarioReviewStatusAndRelatedFilter, filter_schema=ReviewStatusFilter,
) )
def list_package_scenarios(request, package_uuid: UUID): def list_package_scenarios(request, package_uuid: UUID):
user = request.user user = request.user
@ -58,7 +57,7 @@ def create_scenario(request, package_uuid: UUID, payload: ScenarioCreateSchema =
user = _anonymous_or_real(request) user = _anonymous_or_real(request)
try: try:
current_package = PackageManager.get_package_by_id(user, package_uuid) current_package = get_package_for_write(user, package_uuid)
except ValueError as e: except ValueError as e:
error_msg = str(e) error_msg = str(e)
if "does not exist" in error_msg: if "does not exist" in error_msg:

View File

@ -22,12 +22,6 @@ class StructureReviewStatusFilter(FilterSchema):
review_status: Annotated[Optional[bool], FilterLookup("compound__package__reviewed")] = None review_status: Annotated[Optional[bool], FilterLookup("compound__package__reviewed")] = None
class ScenarioReviewStatusAndRelatedFilter(ReviewStatusFilter):
"""Filter schema for review_status and parent query parameter."""
exclude_related: Annotated[Optional[bool], FilterLookup("parent__isnull")] = None
# Base schema for all package-scoped entities # Base schema for all package-scoped entities
class PackageEntityOutSchema(Schema): class PackageEntityOutSchema(Schema):
"""Base schema for entities belonging to a package.""" """Base schema for entities belonging to a package."""

View File

@ -2,6 +2,7 @@ from django.conf import settings as s
from django.contrib import admin from django.contrib import admin
from .models import ( from .models import (
AdditionalInformation,
Compound, Compound,
CompoundStructure, CompoundStructure,
Edge, Edge,
@ -16,6 +17,7 @@ from .models import (
Node, Node,
ParallelRule, ParallelRule,
Pathway, Pathway,
PropertyPluginModel,
Reaction, Reaction,
Scenario, Scenario,
Setting, Setting,
@ -27,8 +29,20 @@ from .models import (
Package = s.GET_PACKAGE_MODEL() Package = s.GET_PACKAGE_MODEL()
class AdditionalInformationAdmin(admin.ModelAdmin):
pass
class UserAdmin(admin.ModelAdmin): class UserAdmin(admin.ModelAdmin):
list_display = ["username", "email", "is_active", "is_staff", "is_superuser"] list_display = [
"username",
"email",
"is_active",
"is_staff",
"is_superuser",
"last_login",
"date_joined",
]
class UserPackagePermissionAdmin(admin.ModelAdmin): class UserPackagePermissionAdmin(admin.ModelAdmin):
@ -65,6 +79,10 @@ class EnviFormerAdmin(EPAdmin):
pass pass
class PropertyPluginModelAdmin(admin.ModelAdmin):
pass
class LicenseAdmin(admin.ModelAdmin): class LicenseAdmin(admin.ModelAdmin):
list_display = ["cc_string", "link", "image_link"] list_display = ["cc_string", "link", "image_link"]
@ -117,6 +135,7 @@ class ExternalIdentifierAdmin(admin.ModelAdmin):
pass pass
admin.site.register(AdditionalInformation, AdditionalInformationAdmin)
admin.site.register(User, UserAdmin) admin.site.register(User, UserAdmin)
admin.site.register(UserPackagePermission, UserPackagePermissionAdmin) admin.site.register(UserPackagePermission, UserPackagePermissionAdmin)
admin.site.register(Group, GroupAdmin) admin.site.register(Group, GroupAdmin)
@ -125,6 +144,7 @@ admin.site.register(JobLog, JobLogAdmin)
admin.site.register(Package, PackageAdmin) admin.site.register(Package, PackageAdmin)
admin.site.register(MLRelativeReasoning, MLRelativeReasoningAdmin) admin.site.register(MLRelativeReasoning, MLRelativeReasoningAdmin)
admin.site.register(EnviFormer, EnviFormerAdmin) admin.site.register(EnviFormer, EnviFormerAdmin)
admin.site.register(PropertyPluginModel, PropertyPluginModelAdmin)
admin.site.register(License, LicenseAdmin) admin.site.register(License, LicenseAdmin)
admin.site.register(Compound, CompoundAdmin) admin.site.register(Compound, CompoundAdmin)
admin.site.register(CompoundStructure, CompoundStructureAdmin) admin.site.register(CompoundStructure, CompoundStructureAdmin)

View File

@ -15,3 +15,9 @@ class EPDBConfig(AppConfig):
model_name = getattr(settings, "EPDB_PACKAGE_MODEL", "epdb.Package") model_name = getattr(settings, "EPDB_PACKAGE_MODEL", "epdb.Package")
logger.info(f"Using Package model: {model_name}") logger.info(f"Using Package model: {model_name}")
if settings.PLUGINS_ENABLED:
from bridge.contracts import Property
from utilities.plugin import discover_plugins
settings.PROPERTY_PLUGINS.update(**discover_plugins(_cls=Property))

View File

@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional
import nh3 import nh3
from django.conf import settings as s from django.conf import settings as s
from django.contrib.auth import get_user_model from django.contrib.auth import get_user_model
from django.http import HttpResponse from django.http import HttpResponse, JsonResponse
from django.shortcuts import redirect from django.shortcuts import redirect
from ninja import Field, Form, Query, Router, Schema from ninja import Field, Form, Query, Router, Schema
from ninja.security import SessionAuth from ninja.security import SessionAuth
@ -37,6 +37,13 @@ from .models import (
Package = s.GET_PACKAGE_MODEL() Package = s.GET_PACKAGE_MODEL()
def get_package_for_write(user, package_uuid):
p = PackageManager.get_package_by_id(user, package_uuid)
if not PackageManager.writable(user, p):
raise ValueError("You do not have the rights to write to this Package!")
return p
def _anonymous_or_real(request): def _anonymous_or_real(request):
if request.user.is_authenticated and not request.user.is_anonymous: if request.user.is_authenticated and not request.user.is_anonymous:
return request.user return request.user
@ -87,6 +94,8 @@ class SimpleObject(Schema):
return "reviewed" if obj.compound.package.reviewed else "unreviewed" return "reviewed" if obj.compound.package.reviewed else "unreviewed"
elif isinstance(obj, Node) or isinstance(obj, Edge): elif isinstance(obj, Node) or isinstance(obj, Edge):
return "reviewed" if obj.pathway.package.reviewed else "unreviewed" return "reviewed" if obj.pathway.package.reviewed else "unreviewed"
elif isinstance(obj, dict) and "review_status" in obj:
return "reviewed" if obj.get("review_status") else "unreviewed"
else: else:
raise ValueError("Object has no package") raise ValueError("Object has no package")
@ -455,7 +464,7 @@ class UpdatePackage(Schema):
@router.post("/package/{uuid:package_uuid}", response={200: PackageSchema | Any, 400: Error}) @router.post("/package/{uuid:package_uuid}", response={200: PackageSchema | Any, 400: Error})
def update_package(request, package_uuid, pack: Form[UpdatePackage]): def update_package(request, package_uuid, pack: Form[UpdatePackage]):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if pack.hiddenMethod: if pack.hiddenMethod:
if pack.hiddenMethod == "DELETE": if pack.hiddenMethod == "DELETE":
@ -551,21 +560,42 @@ class CompoundSchema(Schema):
@staticmethod @staticmethod
def resolve_halflifes(obj: Compound): def resolve_halflifes(obj: Compound):
return [] res = []
for scen, hls in obj.half_lifes().items():
for hl in hls:
res.append(
{
"hl": str(hl.dt50),
"hlComment": hl.comment,
"hlFit": hl.fit,
"hlModel": hl.model,
"scenarioId": scen.url,
"scenarioName": scen.name,
"scenarioType": scen.scenario_type,
"source": hl.source,
}
)
return res
@staticmethod @staticmethod
def resolve_pubchem_compound_references(obj: Compound): def resolve_pubchem_compound_references(obj: Compound):
# TODO
return [] return []
@staticmethod @staticmethod
def resolve_pathway_scenarios(obj: Compound): def resolve_pathway_scenarios(obj: Compound):
return [ res = []
for pw in obj.related_pathways:
for scen in pw.scenarios.all():
res.append(
{ {
"scenarioId": "https://envipath.org/package/5882df9c-dae1-4d80-a40e-db4724271456/scenario/cd8350cd-4249-4111-ba9f-4e2209338501", "scenarioId": scen.url,
"scenarioName": "Fritz, R. & Brauner, A. (1989) - (00004)", "scenarioName": scen.name,
"scenarioType": "Soil", "scenarioType": scen.scenario_type,
} }
] )
return res
class CompoundStructureSchema(Schema): class CompoundStructureSchema(Schema):
@ -618,7 +648,22 @@ class CompoundStructureSchema(Schema):
@staticmethod @staticmethod
def resolve_halflifes(obj: CompoundStructure): def resolve_halflifes(obj: CompoundStructure):
return [] res = []
for scen, hls in obj.half_lifes().items():
for hl in hls:
res.append(
{
"hl": str(hl.dt50),
"hlComment": hl.comment,
"hlFit": hl.fit,
"hlModel": hl.model,
"scenarioId": scen.url,
"scenarioName": scen.name,
"scenarioType": scen.scenario_type,
"source": hl.source,
}
)
return res
@staticmethod @staticmethod
def resolve_pubchem_compound_references(obj: CompoundStructure): def resolve_pubchem_compound_references(obj: CompoundStructure):
@ -626,13 +671,18 @@ class CompoundStructureSchema(Schema):
@staticmethod @staticmethod
def resolve_pathway_scenarios(obj: CompoundStructure): def resolve_pathway_scenarios(obj: CompoundStructure):
return [ res = []
for pw in obj.related_pathways:
for scen in pw.scenarios.all():
res.append(
{ {
"scenarioId": "https://envipath.org/package/5882df9c-dae1-4d80-a40e-db4724271456/scenario/cd8350cd-4249-4111-ba9f-4e2209338501", "scenarioId": scen.url,
"scenarioName": "Fritz, R. & Brauner, A. (1989) - (00004)", "scenarioName": scen.name,
"scenarioType": "Soil", "scenarioType": scen.scenario_type,
} }
] )
return res
class CompoundStructureWrapper(Schema): class CompoundStructureWrapper(Schema):
@ -717,7 +767,7 @@ def create_package_compound(
c: Form[CreateCompound], c: Form[CreateCompound],
): ):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
# inchi is not used atm # inchi is not used atm
c = Compound.create( c = Compound.create(
p, c.compoundSmiles, c.compoundName, c.compoundDescription, inchi=c.inchi p, c.compoundSmiles, c.compoundName, c.compoundDescription, inchi=c.inchi
@ -730,14 +780,10 @@ def create_package_compound(
@router.delete("/package/{uuid:package_uuid}/compound/{uuid:compound_uuid}") @router.delete("/package/{uuid:package_uuid}/compound/{uuid:compound_uuid}")
def delete_compound(request, package_uuid, compound_uuid): def delete_compound(request, package_uuid, compound_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
c = Compound.objects.get(package=p, uuid=compound_uuid) c = Compound.objects.get(package=p, uuid=compound_uuid)
c.delete() c.delete()
return redirect(f"{p.url}/compound") return redirect(f"{p.url}/compound")
else:
raise ValueError("You do not have the rights to delete this Compound!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Compound with id {compound_uuid} failed due to insufficient rights!" "message": f"Deleting Compound with id {compound_uuid} failed due to insufficient rights!"
@ -749,9 +795,8 @@ def delete_compound(request, package_uuid, compound_uuid):
) )
def delete_compound_structure(request, package_uuid, compound_uuid, structure_uuid): def delete_compound_structure(request, package_uuid, compound_uuid, structure_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
c = Compound.objects.get(package=p, uuid=compound_uuid) c = Compound.objects.get(package=p, uuid=compound_uuid)
cs = CompoundStructure.objects.get(compound=c, uuid=structure_uuid) cs = CompoundStructure.objects.get(compound=c, uuid=structure_uuid)
@ -772,8 +817,7 @@ def delete_compound_structure(request, package_uuid, compound_uuid, structure_uu
else: else:
cs.delete() cs.delete()
return redirect(c.url + "/structure") return redirect(c.url + "/structure")
else:
raise ValueError("You do not have the rights to delete this CompoundStructure!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting CompoundStructure with id {compound_uuid} failed due to insufficient rights!" "message": f"Deleting CompoundStructure with id {compound_uuid} failed due to insufficient rights!"
@ -960,7 +1004,7 @@ def create_package_simple_rule(
r: Form[CreateSimpleRule], r: Form[CreateSimpleRule],
): ):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if r.rdkitrule and r.rdkitrule.strip() == "true": if r.rdkitrule and r.rdkitrule.strip() == "true":
raise ValueError("Not yet implemented!") raise ValueError("Not yet implemented!")
@ -996,7 +1040,7 @@ def create_package_parallel_rule(
r: Form[CreateParallelRule], r: Form[CreateParallelRule],
): ):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
srs = SimpleRule.objects.filter(package=p, url__in=r.simpleRules) srs = SimpleRule.objects.filter(package=p, url__in=r.simpleRules)
@ -1040,7 +1084,7 @@ def post_package_parallel_rule(request, package_uuid, rule_uuid, compound: Form[
def _post_package_rule(request, package_uuid, rule_uuid, compound: Form[str]): def _post_package_rule(request, package_uuid, rule_uuid, compound: Form[str]):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
r = Rule.objects.get(package=p, uuid=rule_uuid) r = Rule.objects.get(package=p, uuid=rule_uuid)
if compound is not None: if compound is not None:
@ -1085,14 +1129,11 @@ def delete_parallel_rule(request, package_uuid, rule_uuid):
def _delete_rule(request, package_uuid, rule_uuid): def _delete_rule(request, package_uuid, rule_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
r = Rule.objects.get(package=p, uuid=rule_uuid) r = Rule.objects.get(package=p, uuid=rule_uuid)
r.delete() r.delete()
return redirect(f"{p.url}/rule") return redirect(f"{p.url}/rule")
else:
raise ValueError("You do not have the rights to delete this Rule!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Rule with id {rule_uuid} failed due to insufficient rights!" "message": f"Deleting Rule with id {rule_uuid} failed due to insufficient rights!"
@ -1207,7 +1248,7 @@ def create_package_reaction(
r: Form[CreateReaction], r: Form[CreateReaction],
): ):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if r.smirks is None and (r.educt is None or r.product is None): if r.smirks is None and (r.educt is None or r.product is None):
raise ValueError("Either SMIRKS or educt/product must be provided") raise ValueError("Either SMIRKS or educt/product must be provided")
@ -1253,14 +1294,11 @@ def create_package_reaction(
@router.delete("/package/{uuid:package_uuid}/reaction/{uuid:reaction_uuid}") @router.delete("/package/{uuid:package_uuid}/reaction/{uuid:reaction_uuid}")
def delete_reaction(request, package_uuid, reaction_uuid): def delete_reaction(request, package_uuid, reaction_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
r = Reaction.objects.get(package=p, uuid=reaction_uuid) r = Reaction.objects.get(package=p, uuid=reaction_uuid)
r.delete() r.delete()
return redirect(f"{p.url}/reaction") return redirect(f"{p.url}/reaction")
else:
raise ValueError("You do not have the rights to delete this Reaction!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Reaction with id {reaction_uuid} failed due to insufficient rights!" "message": f"Deleting Reaction with id {reaction_uuid} failed due to insufficient rights!"
@ -1332,17 +1370,53 @@ def get_package_scenario(request, package_uuid, scenario_uuid):
} }
@router.delete("/package/{uuid:package_uuid}/scenario") @router.post("/package/{uuid:package_uuid}/scenario", response={200: str | Any, 403: Error})
def delete_scenarios(request, package_uuid, scenario_uuid): def create_package_scenario(request, package_uuid):
try: from utilities.legacy import build_additional_information_from_request
p = PackageManager.get_package_by_id(request.user, package_uuid)
try:
p = get_package_for_write(request.user, package_uuid)
scen_date = None
date_year = request.POST.get("dateYear")
date_month = request.POST.get("dateMonth")
date_day = request.POST.get("dateDay")
if date_year:
scen_date = date_year
if date_month:
scen_date += f"-{date_month}"
if date_day:
scen_date += f"-{date_day}"
name = request.POST.get("studyname")
description = request.POST.get("studydescription")
study_type = request.POST.get("type")
ais = []
types = request.POST.get("adInfoTypes[]", "").split(",")
for t in types:
ais.append(build_additional_information_from_request(request, t))
new_s = Scenario.create(p, name, description, scen_date, study_type, ais)
return JsonResponse({"scenarioLocation": new_s.url})
except ValueError:
return 403, {
"message": f"Getting Package with id {package_uuid} failed due to insufficient rights!"
}
@router.delete("/package/{uuid:package_uuid}/scenario")
def delete_scenarios(request, package_uuid):
try:
p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
scens = Scenario.objects.filter(package=p) scens = Scenario.objects.filter(package=p)
scens.delete() scens.delete()
return redirect(f"{p.url}/scenario") return redirect(f"{p.url}/scenario")
else:
raise ValueError("You do not have the rights to delete Scenarios!")
except ValueError: except ValueError:
return 403, {"message": "Deleting Scenarios failed due to insufficient rights!"} return 403, {"message": "Deleting Scenarios failed due to insufficient rights!"}
@ -1350,14 +1424,12 @@ def delete_scenarios(request, package_uuid, scenario_uuid):
@router.delete("/package/{uuid:package_uuid}/scenario/{uuid:scenario_uuid}") @router.delete("/package/{uuid:package_uuid}/scenario/{uuid:scenario_uuid}")
def delete_scenario(request, package_uuid, scenario_uuid): def delete_scenario(request, package_uuid, scenario_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
scen = Scenario.objects.get(package=p, uuid=scenario_uuid) scen = Scenario.objects.get(package=p, uuid=scenario_uuid)
scen.delete() scen.delete()
return redirect(f"{p.url}/scenario") return redirect(f"{p.url}/scenario")
else:
raise ValueError("You do not have the rights to delete this Scenario!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Scenario with id {scenario_uuid} failed due to insufficient rights!" "message": f"Deleting Scenario with id {scenario_uuid} failed due to insufficient rights!"
@ -1380,8 +1452,8 @@ class PathwayEdge(Schema):
pseudo: bool = False pseudo: bool = False
rule: Optional[str] = Field(None, alias="rule") rule: Optional[str] = Field(None, alias="rule")
scenarios: List[SimpleScenario] = Field([], alias="scenarios") scenarios: List[SimpleScenario] = Field([], alias="scenarios")
source: int = -1 source: int = Field(-1)
target: int = -1 target: int = Field(-1)
@staticmethod @staticmethod
def resolve_rule(obj: Edge): def resolve_rule(obj: Edge):
@ -1394,7 +1466,7 @@ class PathwayEdge(Schema):
class PathwayNode(Schema): class PathwayNode(Schema):
atomCount: int = Field(None, alias="atom_count") atomCount: int = Field(None, alias="atom_count")
depth: int = Field(None, alias="depth") depth: float = Field(None, alias="depth")
dt50s: List[Dict[str, str]] = Field([], alias="dt50s") dt50s: List[Dict[str, str]] = Field([], alias="dt50s")
engineeredIntermediate: bool = Field(None, alias="engineered_intermediate") engineeredIntermediate: bool = Field(None, alias="engineered_intermediate")
id: str = Field(None, alias="url") id: str = Field(None, alias="url")
@ -1444,9 +1516,9 @@ class PathwaySchema(Schema):
isIncremental: bool = Field(None, alias="is_incremental") isIncremental: bool = Field(None, alias="is_incremental")
isPredicted: bool = Field(None, alias="is_predicted") isPredicted: bool = Field(None, alias="is_predicted")
lastModified: int = Field(None, alias="last_modified") lastModified: int = Field(None, alias="last_modified")
links: List[PathwayEdge] = Field([], alias="edges") links: List[PathwayEdge] = Field([])
name: str = Field(None, alias="name") name: str = Field(None, alias="name")
nodes: List[PathwayNode] = Field([], alias="nodes") nodes: List[PathwayNode] = Field([])
pathwayName: str = Field(None, alias="name") pathwayName: str = Field(None, alias="name")
reviewStatus: str = Field(None, alias="review_status") reviewStatus: str = Field(None, alias="review_status")
scenarios: List["SimpleScenario"] = Field([], alias="scenarios") scenarios: List["SimpleScenario"] = Field([], alias="scenarios")
@ -1468,6 +1540,14 @@ class PathwaySchema(Schema):
def resolve_last_modified(obj: Pathway): def resolve_last_modified(obj: Pathway):
return int(obj.modified.timestamp()) return int(obj.modified.timestamp())
@staticmethod
def resolve_links(obj: Pathway):
return obj.d3_json().get("links", [])
@staticmethod
def resolve_nodes(obj: Pathway):
return obj.d3_json().get("nodes", [])
@router.get("/pathway", response={200: PathwayWrapper, 403: Error}) @router.get("/pathway", response={200: PathwayWrapper, 403: Error})
def get_pathways(request): def get_pathways(request):
@ -1511,16 +1591,16 @@ class CreatePathway(Schema):
selectedSetting: str | None = None selectedSetting: str | None = None
@router.post("/package/{uuid:package_uuid}/pathway") @router.post("/package/{uuid:package_uuid}/pathway", response={200: Any, 403: Error})
def create_pathway( def create_package_pathway(
request, request,
package_uuid, package_uuid,
pw: Form[CreatePathway], pw: Form[CreatePathway],
): ):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
stand_smiles = FormatConverter.standardize(pw.smilesinput.strip()) stand_smiles = FormatConverter.standardize(pw.smilesinput.strip(), remove_stereo=True)
new_pw = Pathway.create(p, stand_smiles, name=pw.name, description=pw.description) new_pw = Pathway.create(p, stand_smiles, name=pw.name, description=pw.description)
@ -1547,20 +1627,18 @@ def create_pathway(
return redirect(new_pw.url) return redirect(new_pw.url)
except ValueError as e: except ValueError as e:
return 400, {"message": str(e)} return 403, {"message": str(e)}
@router.delete("/package/{uuid:package_uuid}/pathway/{uuid:pathway_uuid}") @router.delete("/package/{uuid:package_uuid}/pathway/{uuid:pathway_uuid}")
def delete_pathway(request, package_uuid, pathway_uuid): def delete_pathway(request, package_uuid, pathway_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
pw = Pathway.objects.get(package=p, uuid=pathway_uuid) pw = Pathway.objects.get(package=p, uuid=pathway_uuid)
pw.delete() pw.delete()
return redirect(f"{p.url}/pathway") return redirect(f"{p.url}/pathway")
else:
raise ValueError("You do not have the rights to delete this pathway!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Pathway with id {pathway_uuid} failed due to insufficient rights!" "message": f"Deleting Pathway with id {pathway_uuid} failed due to insufficient rights!"
@ -1668,7 +1746,7 @@ class CreateNode(Schema):
) )
def add_pathway_node(request, package_uuid, pathway_uuid, n: Form[CreateNode]): def add_pathway_node(request, package_uuid, pathway_uuid, n: Form[CreateNode]):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
pw = Pathway.objects.get(package=p, uuid=pathway_uuid) pw = Pathway.objects.get(package=p, uuid=pathway_uuid)
if n.nodeDepth is not None and n.nodeDepth.strip() != "": if n.nodeDepth is not None and n.nodeDepth.strip() != "":
@ -1686,15 +1764,13 @@ def add_pathway_node(request, package_uuid, pathway_uuid, n: Form[CreateNode]):
@router.delete("/package/{uuid:package_uuid}/pathway/{uuid:pathway_uuid}/node/{uuid:node_uuid}") @router.delete("/package/{uuid:package_uuid}/pathway/{uuid:pathway_uuid}/node/{uuid:node_uuid}")
def delete_node(request, package_uuid, pathway_uuid, node_uuid): def delete_node(request, package_uuid, pathway_uuid, node_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
pw = Pathway.objects.get(package=p, uuid=pathway_uuid) pw = Pathway.objects.get(package=p, uuid=pathway_uuid)
n = Node.objects.get(pathway=pw, uuid=node_uuid) n = Node.objects.get(pathway=pw, uuid=node_uuid)
n.delete() n.delete()
return redirect(f"{pw.url}/node") return redirect(f"{pw.url}/node")
else:
raise ValueError("You do not have the rights to delete this Node!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Node with id {node_uuid} failed due to insufficient rights!" "message": f"Deleting Node with id {node_uuid} failed due to insufficient rights!"
@ -1731,7 +1807,7 @@ class EdgeSchema(Schema):
startNodes: List["EdgeNode"] = Field([], alias="start_nodes") startNodes: List["EdgeNode"] = Field([], alias="start_nodes")
@staticmethod @staticmethod
def resolve_review_status(obj: Node): def resolve_review_status(obj: Edge):
return "reviewed" if obj.pathway.package.reviewed else "unreviewed" return "reviewed" if obj.pathway.package.reviewed else "unreviewed"
@ -1778,7 +1854,7 @@ class CreateEdge(Schema):
) )
def add_pathway_edge(request, package_uuid, pathway_uuid, e: Form[CreateEdge]): def add_pathway_edge(request, package_uuid, pathway_uuid, e: Form[CreateEdge]):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
pw = Pathway.objects.get(package=p, uuid=pathway_uuid) pw = Pathway.objects.get(package=p, uuid=pathway_uuid)
if e.edgeAsSmirks is None and (e.educts is None or e.products is None): if e.edgeAsSmirks is None and (e.educts is None or e.products is None):
@ -1836,15 +1912,13 @@ def add_pathway_edge(request, package_uuid, pathway_uuid, e: Form[CreateEdge]):
@router.delete("/package/{uuid:package_uuid}/pathway/{uuid:pathway_uuid}/edge/{uuid:edge_uuid}") @router.delete("/package/{uuid:package_uuid}/pathway/{uuid:pathway_uuid}/edge/{uuid:edge_uuid}")
def delete_edge(request, package_uuid, pathway_uuid, edge_uuid): def delete_edge(request, package_uuid, pathway_uuid, edge_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
pw = Pathway.objects.get(package=p, uuid=pathway_uuid) pw = Pathway.objects.get(package=p, uuid=pathway_uuid)
e = Edge.objects.get(pathway=pw, uuid=edge_uuid) e = Edge.objects.get(pathway=pw, uuid=edge_uuid)
e.delete() e.delete()
return redirect(f"{pw.url}/edge") return redirect(f"{pw.url}/edge")
else:
raise ValueError("You do not have the rights to delete this Edge!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Edge with id {edge_uuid} failed due to insufficient rights!" "message": f"Deleting Edge with id {edge_uuid} failed due to insufficient rights!"
@ -1937,7 +2011,7 @@ def get_model(request, package_uuid, model_uuid, c: Query[Classify]):
return 400, {"message": "Received empty SMILES"} return 400, {"message": "Received empty SMILES"}
try: try:
stand_smiles = FormatConverter.standardize(c.smiles) stand_smiles = FormatConverter.standardize(c.smiles, remove_stereo=True)
except ValueError: except ValueError:
return 400, {"message": f'"{c.smiles}" is not a valid SMILES'} return 400, {"message": f'"{c.smiles}" is not a valid SMILES'}
@ -1980,14 +2054,11 @@ def get_model(request, package_uuid, model_uuid, c: Query[Classify]):
@router.delete("/package/{uuid:package_uuid}/model/{uuid:model_uuid}") @router.delete("/package/{uuid:package_uuid}/model/{uuid:model_uuid}")
def delete_model(request, package_uuid, model_uuid): def delete_model(request, package_uuid, model_uuid):
try: try:
p = PackageManager.get_package_by_id(request.user, package_uuid) p = get_package_for_write(request.user, package_uuid)
if PackageManager.writable(request.user, p):
m = EPModel.objects.get(package=p, uuid=model_uuid) m = EPModel.objects.get(package=p, uuid=model_uuid)
m.delete() m.delete()
return redirect(f"{p.url}/model") return redirect(f"{p.url}/model")
else:
raise ValueError("You do not have the rights to delete this Model!")
except ValueError: except ValueError:
return 403, { return 403, {
"message": f"Deleting Model with id {model_uuid} failed due to insufficient rights!" "message": f"Deleting Model with id {model_uuid} failed due to insufficient rights!"

View File

@ -1,4 +1,3 @@
import json
import logging import logging
import re import re
from typing import Any, Dict, List, Optional, Set, Union, Tuple from typing import Any, Dict, List, Optional, Set, Union, Tuple
@ -11,6 +10,7 @@ from django.db import transaction
from pydantic import ValidationError from pydantic import ValidationError
from epdb.models import ( from epdb.models import (
AdditionalInformation,
Compound, Compound,
CompoundStructure, CompoundStructure,
Edge, Edge,
@ -22,6 +22,7 @@ from epdb.models import (
Node, Node,
Pathway, Pathway,
Permission, Permission,
PropertyPluginModel,
Reaction, Reaction,
Rule, Rule,
Setting, Setting,
@ -633,15 +634,30 @@ class PackageManager(object):
# Stores old_id to new_id # Stores old_id to new_id
mapping = {} mapping = {}
# Stores new_scen_id to old_parent_scen_id
parent_mapping = {}
# Mapping old scen_id to old_obj_id # Mapping old scen_id to old_obj_id
scen_mapping = defaultdict(list) scen_mapping = defaultdict(list)
# Enzymelink Mapping rule_id to enzymelink objects # Enzymelink Mapping rule_id to enzymelink objects
enzyme_mapping = defaultdict(list) enzyme_mapping = defaultdict(list)
# old_parent_id to child
postponed_scens = defaultdict(list)
# Store Scenarios # Store Scenarios
for scenario in data["scenarios"]: for scenario in data["scenarios"]:
skip_scen = False
# Check if parent exists and park this Scenario to convert it later into an
# AdditionalInformation object
for ex in scenario.get("additionalInformationCollection", {}).get(
"additionalInformation", []
):
if ex["name"] == "referringscenario":
postponed_scens[ex["data"]].append(scenario)
skip_scen = True
break
if skip_scen:
continue
scen = Scenario() scen = Scenario()
scen.package = pack scen.package = pack
scen.uuid = UUID(scenario["id"].split("/")[-1]) if keep_ids else uuid4() scen.uuid = UUID(scenario["id"].split("/")[-1]) if keep_ids else uuid4()
@ -654,19 +670,12 @@ class PackageManager(object):
mapping[scenario["id"]] = scen.uuid mapping[scenario["id"]] = scen.uuid
new_add_inf = defaultdict(list)
# TODO Store AI...
for ex in scenario.get("additionalInformationCollection", {}).get( for ex in scenario.get("additionalInformationCollection", {}).get(
"additionalInformation", [] "additionalInformation", []
): ):
name = ex["name"] name = ex["name"]
addinf_data = ex["data"] addinf_data = ex["data"]
# park the parent scen id for now and link it later
if name == "referringscenario":
parent_mapping[scen.uuid] = addinf_data
continue
# Broken eP Data # Broken eP Data
if name == "initialmasssediment" and addinf_data == "missing data": if name == "initialmasssediment" and addinf_data == "missing data":
continue continue
@ -674,17 +683,11 @@ class PackageManager(object):
continue continue
try: try:
res = AdditionalInformationConverter.convert(name, addinf_data) ai = AdditionalInformationConverter.convert(name, addinf_data)
res_cls_name = res.__class__.__name__ AdditionalInformation.create(pack, ai, scenario=scen)
ai_data = json.loads(res.model_dump_json())
ai_data["uuid"] = f"{uuid4()}"
new_add_inf[res_cls_name].append(ai_data)
except (ValidationError, ValueError): except (ValidationError, ValueError):
logger.error(f"Failed to convert {name} with {addinf_data}") logger.error(f"Failed to convert {name} with {addinf_data}")
scen.additional_information = new_add_inf
scen.save()
print("Scenarios imported...") print("Scenarios imported...")
# Store compounds and its structures # Store compounds and its structures
@ -924,14 +927,46 @@ class PackageManager(object):
print("Pathways imported...") print("Pathways imported...")
# Linking Phase for parent, children in postponed_scens.items():
for child, parent in parent_mapping.items(): for child in children:
child_obj = Scenario.objects.get(uuid=child) for ex in child.get("additionalInformationCollection", {}).get(
parent_obj = Scenario.objects.get(uuid=mapping[parent]) "additionalInformation", []
child_obj.parent = parent_obj ):
child_obj.save() child_id = child["id"]
name = ex["name"]
addinf_data = ex["data"]
if name == "referringscenario":
continue
# Broken eP Data
if name == "initialmasssediment" and addinf_data == "missing data":
continue
if name == "columnheight" and addinf_data == "(2)-(2.5);(6)-(8)":
continue
ai = AdditionalInformationConverter.convert(name, addinf_data)
if child_id not in scen_mapping:
logger.info(
f"{child_id} not found in scen_mapping. Seems like its not attached to any object"
)
print(
f"{child_id} not found in scen_mapping. Seems like its not attached to any object"
)
scen = Scenario.objects.get(uuid=mapping[parent])
mapping[child_id] = scen.uuid
for obj in scen_mapping[child_id]:
_ = AdditionalInformation.create(pack, ai, scen, content_object=obj)
for scen_id, objects in scen_mapping.items(): for scen_id, objects in scen_mapping.items():
new_id = mapping.get(scen_id)
if new_id is None:
logger.warning(f"Could not find mapping for {scen_id}")
print(f"Could not find mapping for {scen_id}")
continue
scen = Scenario.objects.get(uuid=mapping[scen_id]) scen = Scenario.objects.get(uuid=mapping[scen_id])
for o in objects: for o in objects:
o.scenarios.add(scen) o.scenarios.add(scen)
@ -964,6 +999,7 @@ class PackageManager(object):
matches = re.findall(r">(R[0-9]+)<", evidence["evidence"]) matches = re.findall(r">(R[0-9]+)<", evidence["evidence"])
if not matches or len(matches) != 1: if not matches or len(matches) != 1:
logger.warning(f"Could not find reaction id in {evidence['evidence']}") logger.warning(f"Could not find reaction id in {evidence['evidence']}")
print(f"Could not find reaction id in {evidence['evidence']}")
continue continue
e.add_kegg_reaction_id(matches[0]) e.add_kegg_reaction_id(matches[0])
@ -983,7 +1019,6 @@ class PackageManager(object):
print("Fixing Node depths...") print("Fixing Node depths...")
total_pws = Pathway.objects.filter(package=pack).count() total_pws = Pathway.objects.filter(package=pack).count()
for p, pw in enumerate(Pathway.objects.filter(package=pack)): for p, pw in enumerate(Pathway.objects.filter(package=pack)):
print(pw.url)
in_count = defaultdict(lambda: 0) in_count = defaultdict(lambda: 0)
out_count = defaultdict(lambda: 0) out_count = defaultdict(lambda: 0)
@ -1019,7 +1054,6 @@ class PackageManager(object):
if str(prod.uuid) not in seen: if str(prod.uuid) not in seen:
old_depth = prod.depth old_depth = prod.depth
if old_depth != i + 1: if old_depth != i + 1:
print(f"updating depth from {old_depth} to {i + 1}")
prod.depth = i + 1 prod.depth = i + 1
prod.save() prod.save()
@ -1030,7 +1064,7 @@ class PackageManager(object):
if new_level: if new_level:
levels.append(new_level) levels.append(new_level)
print(f"{p + 1}/{total_pws} fixed.") print(f"{p + 1}/{total_pws} fixed.", end="\r")
return pack return pack
@ -1109,19 +1143,23 @@ class SettingManager(object):
description: str = None, description: str = None,
max_nodes: int = None, max_nodes: int = None,
max_depth: int = None, max_depth: int = None,
rule_packages: List[Package] = None, rule_packages: List[Package] | None = None,
model: EPModel = None, model: EPModel = None,
model_threshold: float = None, model_threshold: float = None,
expansion_scheme: ExpansionSchemeChoice = ExpansionSchemeChoice.BFS, expansion_scheme: ExpansionSchemeChoice = ExpansionSchemeChoice.BFS,
property_models: List["PropertyPluginModel"] | None = None,
): ):
new_s = Setting() new_s = Setting()
# Clean for potential XSS # Clean for potential XSS
new_s.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip() new_s.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
new_s.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip() new_s.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()
new_s.max_nodes = max_nodes new_s.max_nodes = max_nodes
new_s.max_depth = max_depth new_s.max_depth = max_depth
new_s.model = model new_s.model = model
new_s.model_threshold = model_threshold new_s.model_threshold = model_threshold
new_s.expansion_scheme = expansion_scheme
new_s.save() new_s.save()
@ -1130,6 +1168,11 @@ class SettingManager(object):
new_s.rule_packages.add(r) new_s.rule_packages.add(r)
new_s.save() new_s.save()
if property_models is not None:
for pm in property_models:
new_s.property_models.add(pm)
new_s.save()
usp = UserSettingPermission() usp = UserSettingPermission()
usp.user = user usp.user = user
usp.setting = new_s usp.setting = new_s

View File

@ -41,9 +41,7 @@ class Command(BaseCommand):
"SequentialRule", "SequentialRule",
"Scenario", "Scenario",
"Setting", "Setting",
"MLRelativeReasoning", "EPModel",
"RuleBasedRelativeReasoning",
"EnviFormer",
"ApplicabilityDomain", "ApplicabilityDomain",
"EnzymeLink", "EnzymeLink",
] ]

View File

@ -0,0 +1,83 @@
import os
import subprocess
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
"-n",
"--name",
type=str,
help="Name of the database to recreate. Default is 'appdb'",
default="appdb",
)
parser.add_argument(
"-d",
"--dump",
type=str,
help="Path to the dump file",
default="./fixtures/db.dump",
)
parser.add_argument(
"-ou",
"--oldurl",
type=str,
help="Old URL, e.g. https://envipath.org/",
default="https://envipath.org/",
)
parser.add_argument(
"-nu",
"--newurl",
type=str,
help="New URL, e.g. http://localhost:8000/",
default="http://localhost:8000/",
)
def handle(self, *args, **options):
dump_file = options["dump"]
if not os.path.exists(dump_file):
raise ValueError(f"Dump file {dump_file} does not exist")
db_name = options["name"]
print(f"Dropping database {db_name} y/n: ", end="")
if input() in "yY":
result = subprocess.run(
["dropdb", db_name],
capture_output=True,
text=True,
)
print(result.stdout)
else:
raise ValueError("Aborted")
print(f"Creating database {db_name}")
result = subprocess.run(
["createdb", db_name],
capture_output=True,
text=True,
)
print(result.stdout)
print(f"Restoring database {db_name} from {dump_file}")
result = subprocess.run(
["pg_restore", "-d", db_name, dump_file, "--no-owner"],
capture_output=True,
text=True,
)
print(result.stdout)
if db_name == settings.DATABASES["default"]["NAME"]:
call_command("localize_urls", "--old", options["oldurl"], "--new", options["newurl"])
else:
print("Skipping localize_urls as database is not the default one.")

View File

@ -0,0 +1,179 @@
# Generated by Django 5.2.7 on 2026-02-12 09:38
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("epdb", "0015_user_is_reviewer"),
]
operations = [
migrations.RemoveField(
model_name="enviformer",
name="model_status",
),
migrations.RemoveField(
model_name="mlrelativereasoning",
name="model_status",
),
migrations.RemoveField(
model_name="rulebasedrelativereasoning",
name="model_status",
),
migrations.AddField(
model_name="epmodel",
name="model_status",
field=models.CharField(
choices=[
("INITIAL", "Initial"),
("INITIALIZING", "Model is initializing."),
("BUILDING", "Model is building."),
(
"BUILT_NOT_EVALUATED",
"Model is built and can be used for predictions, Model is not evaluated yet.",
),
("EVALUATING", "Model is evaluating"),
("FINISHED", "Model has finished building and evaluation."),
("ERROR", "Model has failed."),
],
default="INITIAL",
),
),
migrations.AlterField(
model_name="enviformer",
name="eval_packages",
field=models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_eval_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Evaluation Packages",
),
),
migrations.AlterField(
model_name="enviformer",
name="rule_packages",
field=models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_rule_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Rule Packages",
),
),
migrations.AlterField(
model_name="mlrelativereasoning",
name="eval_packages",
field=models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_eval_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Evaluation Packages",
),
),
migrations.AlterField(
model_name="mlrelativereasoning",
name="rule_packages",
field=models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_rule_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Rule Packages",
),
),
migrations.AlterField(
model_name="rulebasedrelativereasoning",
name="eval_packages",
field=models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_eval_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Evaluation Packages",
),
),
migrations.AlterField(
model_name="rulebasedrelativereasoning",
name="rule_packages",
field=models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_rule_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Rule Packages",
),
),
migrations.CreateModel(
name="PropertyPluginModel",
fields=[
(
"epmodel_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="epdb.epmodel",
),
),
("threshold", models.FloatField(default=0.5)),
("eval_results", models.JSONField(blank=True, default=dict, null=True)),
("multigen_eval", models.BooleanField(default=False)),
("plugin_identifier", models.CharField(max_length=255)),
(
"app_domain",
models.ForeignKey(
blank=True,
default=None,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="epdb.applicabilitydomain",
),
),
(
"data_packages",
models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_data_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Data Packages",
),
),
(
"eval_packages",
models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_eval_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Evaluation Packages",
),
),
(
"rule_packages",
models.ManyToManyField(
blank=True,
related_name="%(app_label)s_%(class)s_rule_packages",
to=settings.EPDB_PACKAGE_MODEL,
verbose_name="Rule Packages",
),
),
],
options={
"abstract": False,
},
bases=("epdb.epmodel",),
),
migrations.AddField(
model_name="setting",
name="property_models",
field=models.ManyToManyField(
blank=True,
related_name="settings",
to="epdb.propertypluginmodel",
verbose_name="Setting Property Models",
),
),
migrations.DeleteModel(
name="PluginModel",
),
]

View File

@ -0,0 +1,93 @@
# Generated by Django 5.2.7 on 2026-02-20 12:02
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    # Creates the generic AdditionalInformation model: one typed JSON payload
    # per row that can be attached to a Scenario and/or (via the contenttypes
    # framework) to an arbitrary target object such as a compound, node or edge.

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        ("epdb", "0016_remove_enviformer_model_status_and_more"),
    ]

    operations = [
        migrations.CreateModel(
            name="AdditionalInformation",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                # Stable external identifier, independent of the DB primary key.
                ("uuid", models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ("url", models.TextField(null=True, unique=True, verbose_name="URL")),
                # Free-form key/value store.
                ("kv", models.JSONField(blank=True, default=dict, null=True)),
                # Class name of the additional-information type this row holds.
                ("type", models.TextField(verbose_name="Additional Information Type")),
                # Serialized payload of the additional-information instance.
                ("data", models.JSONField(blank=True, default=dict, null=True)),
                # Generic-FK target pk; paired with content_type below.
                ("object_id", models.PositiveBigIntegerField(blank=True, null=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "package",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.EPDB_PACKAGE_MODEL,
                        verbose_name="Package",
                    ),
                ),
                (
                    "scenario",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="scenario_additional_information",
                        to="epdb.scenario",
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(fields=["type"], name="epdb_additi_type_394349_idx"),
                    models.Index(
                        fields=["scenario", "type"], name="epdb_additi_scenari_a59edf_idx"
                    ),
                    models.Index(
                        fields=["content_type", "object_id"], name="epdb_additi_content_44d4b4_idx"
                    ),
                    models.Index(
                        fields=["scenario", "content_type", "object_id"],
                        name="epdb_additi_scenari_ef2bf5_idx",
                    ),
                ],
                "constraints": [
                    # content_type and object_id must be set (or unset) together.
                    models.CheckConstraint(
                        condition=models.Q(
                            models.Q(("content_type__isnull", True), ("object_id__isnull", True)),
                            models.Q(("content_type__isnull", False), ("object_id__isnull", False)),
                            _connector="OR",
                        ),
                        name="ck_addinfo_gfk_pair",
                    ),
                    # Each row must reference at least a scenario or a target object.
                    models.CheckConstraint(
                        condition=models.Q(
                            ("scenario__isnull", False),
                            ("content_type__isnull", False),
                            _connector="OR",
                        ),
                        name="ck_addinfo_not_both_null",
                    ),
                ],
            },
        ),
    ]

View File

@ -0,0 +1,132 @@
# Generated by Django 5.2.7 on 2026-02-20 12:03
from django.db import migrations
def get_additional_information(scenario):
    """Yield parsed additional-information objects for *scenario*.

    Walks the legacy ``scenario.additional_information`` JSON mapping,
    normalises two historically misspelled keys, and instantiates the
    matching class from the plugin registry for every value.

    Yields:
        Instances of the registered additional-information classes, each
        optionally carrying the source ``uuid`` for later manipulation.

    Raises:
        Exception: re-raised from the class constructor when instantiation
            fails and no fallback parser exists for the key.
    """
    from envipy_additional_information import registry
    from envipy_additional_information.parsers import TypeOfAerationParser

    # Loop invariant: build the class-name -> class mapping once instead of
    # once per value (the original rebuilt it inside the innermost loop).
    MAPPING = {c.__name__: c for c in registry.list_models().values()}

    for k, vals in scenario.additional_information.items():
        # Enzyme entries are handled elsewhere.
        if k == "enzyme":
            continue
        # Normalise legacy/misspelled keys to their registry names.
        if k == "SpikeConentration":
            k = "SpikeConcentration"
        if k == "AerationType":
            k = "TypeOfAeration"
        for v in vals:
            # Per default additional fields are ignored
            try:
                inst = MAPPING[k](**v)
            except Exception:
                if k == "TypeOfAeration":
                    # Free-text aeration values need the dedicated parser.
                    toa = TypeOfAerationParser()
                    inst = toa.from_string(v["type"])
                else:
                    # Previously `inst` could be unbound (or stale from the
                    # preceding iteration) here; fail loudly instead.
                    raise
            # Add uuid to uniquely identify objects for manipulation
            if "uuid" in v:
                inst.__dict__["uuid"] = v["uuid"]
            yield inst
def forward_func(apps, schema_editor):
    """Move legacy JSON additional information onto AdditionalInformation rows.

    Parent scenarios keep their data attached directly. Child ("referring")
    scenarios are flattened onto their parent together with a generic FK to
    each object (compound, node, edge, ...) they were attached to, and are
    deleted afterwards.
    """
    Scenario = apps.get_model("epdb", "Scenario")
    ContentType = apps.get_model("contenttypes", "ContentType")
    AdditionalInformation = apps.get_model("epdb", "AdditionalInformation")

    bulk = []
    # ContentType.model is the lowercased model name, matching the
    # related-manager prefixes used below.
    ctype = {o.model: o for o in ContentType.objects.all()}

    # Loop invariant: the related managers a child scenario may be attached
    # through (the original rebuilt this list per additional-information item).
    rel_objs = [
        "compound",
        "compoundstructure",
        "reaction",
        "rule",
        "pathway",
        "node",
        "edge",
    ]

    # Parent scenarios: attach their additional information directly.
    # (No prefetch needed here - the related sets are only read for children.)
    parents = Scenario.objects.filter(parent__isnull=True)
    total_parents = len(parents)  # evaluates the queryset once
    for i, scenario in enumerate(parents):
        print(f"{i + 1}/{total_parents}", end="\r")
        for ai in get_additional_information(scenario):
            bulk.append(
                AdditionalInformation(
                    package=scenario.package,
                    scenario=scenario,
                    type=ai.__class__.__name__,
                    data=ai.model_dump(mode="json"),
                )
            )
    print("\n", len(bulk))

    # Child scenarios: flatten onto the parent, pointing at every object the
    # child was attached to via the generic FK.
    related = Scenario.objects.prefetch_related(
        "compound_set",
        "compoundstructure_set",
        "reaction_set",
        "rule_set",
        "pathway_set",
        "node_set",
        "edge_set",
    ).filter(parent__isnull=False)
    total_related = len(related)
    for i, scenario in enumerate(related):
        print(f"{i + 1}/{total_related}", end="\r")
        parent = scenario.parent
        # Check to which objects this scenario is attached to
        for ai in get_additional_information(scenario):
            for rel_obj in rel_objs:
                for o in getattr(scenario, f"{rel_obj}_set").all():
                    bulk.append(
                        AdditionalInformation(
                            package=scenario.package,
                            scenario=parent,
                            type=ai.__class__.__name__,
                            data=ai.model_dump(mode="json"),
                            content_type=ctype[rel_obj],
                            object_id=o.pk,
                        )
                    )

    print("Start creating additional information objects...")
    AdditionalInformation.objects.bulk_create(bulk)
    print("Done!")
    print(len(bulk))

    # Child scenarios have been flattened onto their parents - drop them.
    Scenario.objects.filter(parent__isnull=False).delete()

    # Call ai save to fix urls
    # NOTE(review): apps.get_model() returns a historical model without custom
    # methods, so this calls the plain Model.save(); confirm the URL fix-up
    # actually runs here.
    ais = AdditionalInformation.objects.all()
    total = ais.count()
    for i, ai in enumerate(ais):
        print(f"{i + 1}/{total}", end="\r")
        ai.save()
class Migration(migrations.Migration):
    # Data migration: flattens the legacy JSON additional information into
    # AdditionalInformation rows (see forward_func above). The data move is
    # not reversible, hence the noop reverse.

    dependencies = [
        ("epdb", "0017_additionalinformation"),
    ]

    operations = [
        migrations.RunPython(forward_func, reverse_code=migrations.RunPython.noop),
    ]

View File

@ -0,0 +1,20 @@
# Generated by Django 5.2.7 on 2026-02-23 08:45
from django.db import migrations
class Migration(migrations.Migration):
    # Cleanup after the 0018 data migration: the legacy JSON blob and the
    # parent self-reference on Scenario are no longer needed.

    dependencies = [
        ("epdb", "0018_auto_20260220_1203"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="scenario",
            name="additional_information",
        ),
        migrations.RemoveField(
            model_name="scenario",
            name="parent",
        ),
    ]

View File

@ -0,0 +1,65 @@
# Generated by Django 5.2.7 on 2026-03-09 10:41
import django.db.models.deletion
from django.db import migrations, models
def populate_polymorphic_ctype(apps, schema_editor):
    """Backfill ``polymorphic_ctype`` on pre-existing rows.

    Rows created before the field was added have it NULL; point each one at
    the ContentType of its own model (Compound first, then CompoundStructure).
    """
    ContentType = apps.get_model("contenttypes", "ContentType")
    for model_name in ("Compound", "CompoundStructure"):
        model = apps.get_model("epdb", model_name)
        ct = ContentType.objects.get_for_model(model)
        model.objects.filter(polymorphic_ctype__isnull=True).update(polymorphic_ctype=ct)
def reverse_populate_polymorphic_ctype(apps, schema_editor):
    """Reverse of the backfill: clear ``polymorphic_ctype`` on every row."""
    for model_name in ("Compound", "CompoundStructure"):
        model = apps.get_model("epdb", model_name)
        model.objects.all().update(polymorphic_ctype=None)
class Migration(migrations.Migration):
    # Makes Compound and CompoundStructure polymorphic: adds the
    # polymorphic_ctype FK used by django-polymorphic and backfills it for
    # existing rows via RunPython.

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        ("epdb", "0019_remove_scenario_additional_information_and_more"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="compoundstructure",
            options={"base_manager_name": "objects"},
        ),
        migrations.AddField(
            model_name="compound",
            name="polymorphic_ctype",
            field=models.ForeignKey(
                editable=False,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="polymorphic_%(app_label)s.%(class)s_set+",
                to="contenttypes.contenttype",
            ),
        ),
        migrations.AddField(
            model_name="compoundstructure",
            name="polymorphic_ctype",
            field=models.ForeignKey(
                editable=False,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="polymorphic_%(app_label)s.%(class)s_set+",
                to="contenttypes.contenttype",
            ),
        ),
        migrations.RunPython(populate_polymorphic_ctype, reverse_populate_polymorphic_ctype),
    ]

View File

@ -29,6 +29,8 @@ from polymorphic.models import PolymorphicModel
from sklearn.metrics import jaccard_score, precision_score, recall_score from sklearn.metrics import jaccard_score, precision_score, recall_score
from sklearn.model_selection import ShuffleSplit from sklearn.model_selection import ShuffleSplit
from bridge.contracts import Property
from bridge.dto import RunResult, PredictedProperty
from utilities.chem import FormatConverter, IndigoUtils, PredictionResult, ProductSet from utilities.chem import FormatConverter, IndigoUtils, PredictionResult, ProductSet
from utilities.ml import ( from utilities.ml import (
ApplicabilityDomainPCA, ApplicabilityDomainPCA,
@ -667,6 +669,23 @@ class ScenarioMixin(models.Model):
abstract = True abstract = True
class AdditionalInformationMixin(models.Model):
"""
Optional mixin: lets you do compound.additional_information.all()
without an explicit M2M table.
"""
additional_information = GenericRelation(
"epdb.AdditionalInformation",
content_type_field="content_type",
object_id_field="object_id",
related_query_name="target",
)
class Meta:
abstract = True
class License(models.Model): class License(models.Model):
cc_string = models.TextField(blank=False, null=False, verbose_name="CC string") cc_string = models.TextField(blank=False, null=False, verbose_name="CC string")
link = models.URLField(blank=False, null=False, verbose_name="link") link = models.URLField(blank=False, null=False, verbose_name="link")
@ -745,7 +764,14 @@ class Package(EnviPathModel):
swappable = "EPDB_PACKAGE_MODEL" swappable = "EPDB_PACKAGE_MODEL"
class Compound(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdentifierMixin): class Compound(
PolymorphicModel,
EnviPathModel,
AliasMixin,
ScenarioMixin,
ChemicalIdentifierMixin,
AdditionalInformationMixin,
):
package = models.ForeignKey( package = models.ForeignKey(
s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True
) )
@ -769,7 +795,7 @@ class Compound(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdentifierMixin
num_structs = self.structures.count() num_structs = self.structures.count()
stand_smiles = set() stand_smiles = set()
for structure in self.structures.all(): for structure in self.structures.all():
stand_smiles.add(FormatConverter.standardize(structure.smiles)) stand_smiles.add(FormatConverter.standardize(structure.smiles, remove_stereo=True))
if len(stand_smiles) != 1: if len(stand_smiles) != 1:
logger.debug( logger.debug(
@ -838,7 +864,7 @@ class Compound(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdentifierMixin
if parsed is None: if parsed is None:
raise ValueError("Given SMILES is invalid") raise ValueError("Given SMILES is invalid")
standardized_smiles = FormatConverter.standardize(smiles) standardized_smiles = FormatConverter.standardize(smiles, remove_stereo=True)
# Check if we find a direct match for a given SMILES # Check if we find a direct match for a given SMILES
if CompoundStructure.objects.filter(smiles=smiles, compound__package=package).exists(): if CompoundStructure.objects.filter(smiles=smiles, compound__package=package).exists():
@ -911,7 +937,7 @@ class Compound(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdentifierMixin
if parsed is None: if parsed is None:
raise ValueError("Given SMILES is invalid") raise ValueError("Given SMILES is invalid")
standardized_smiles = FormatConverter.standardize(smiles) standardized_smiles = FormatConverter.standardize(smiles, remove_stereo=True)
is_standardized = standardized_smiles == smiles is_standardized = standardized_smiles == smiles
@ -1073,7 +1099,14 @@ class Compound(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdentifierMixin
unique_together = [("uuid", "package")] unique_together = [("uuid", "package")]
class CompoundStructure(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdentifierMixin): class CompoundStructure(
PolymorphicModel,
EnviPathModel,
AliasMixin,
ScenarioMixin,
ChemicalIdentifierMixin,
AdditionalInformationMixin,
):
compound = models.ForeignKey("epdb.Compound", on_delete=models.CASCADE, db_index=True) compound = models.ForeignKey("epdb.Compound", on_delete=models.CASCADE, db_index=True)
smiles = models.TextField(blank=False, null=False, verbose_name="SMILES") smiles = models.TextField(blank=False, null=False, verbose_name="SMILES")
canonical_smiles = models.TextField(blank=False, null=False, verbose_name="Canonical SMILES") canonical_smiles = models.TextField(blank=False, null=False, verbose_name="Canonical SMILES")
@ -1167,10 +1200,11 @@ class CompoundStructure(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdenti
hls: Dict[Scenario, List[HalfLife]] = defaultdict(list) hls: Dict[Scenario, List[HalfLife]] = defaultdict(list)
for n in self.related_nodes: for n in self.related_nodes:
for scen in n.scenarios.all().order_by("name"): for ai in n.additional_information.filter(scenario__isnull=False).order_by(
for ai in scen.get_additional_information(): "scenario__name"
if isinstance(ai, HalfLife): ):
hls[scen].append(ai) if isinstance(ai.get(), HalfLife):
hls[ai.scenario].append(ai.get())
return dict(hls) return dict(hls)
@ -1195,7 +1229,7 @@ class EnzymeLink(EnviPathModel, KEGGIdentifierMixin):
return ".".join(self.ec_number.split(".")[:3]) + ".-" return ".".join(self.ec_number.split(".")[:3]) + ".-"
class Rule(PolymorphicModel, EnviPathModel, AliasMixin, ScenarioMixin): class Rule(PolymorphicModel, EnviPathModel, AliasMixin, ScenarioMixin, AdditionalInformationMixin):
package = models.ForeignKey( package = models.ForeignKey(
s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True
) )
@ -1424,8 +1458,6 @@ class SimpleRDKitRule(SimpleRule):
return "{}/simple-rdkit-rule/{}".format(self.package.url, self.uuid) return "{}/simple-rdkit-rule/{}".format(self.package.url, self.uuid)
#
#
class ParallelRule(Rule): class ParallelRule(Rule):
simple_rules = models.ManyToManyField("epdb.SimpleRule", verbose_name="Simple rules") simple_rules = models.ManyToManyField("epdb.SimpleRule", verbose_name="Simple rules")
@ -1561,7 +1593,9 @@ class SequentialRuleOrdering(models.Model):
order_index = models.IntegerField(null=False, blank=False) order_index = models.IntegerField(null=False, blank=False)
class Reaction(EnviPathModel, AliasMixin, ScenarioMixin, ReactionIdentifierMixin): class Reaction(
EnviPathModel, AliasMixin, ScenarioMixin, ReactionIdentifierMixin, AdditionalInformationMixin
):
package = models.ForeignKey( package = models.ForeignKey(
s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True
) )
@ -1751,13 +1785,13 @@ class Reaction(EnviPathModel, AliasMixin, ScenarioMixin, ReactionIdentifierMixin
edges = Edge.objects.filter(edge_label=self) edges = Edge.objects.filter(edge_label=self)
for e in edges: for e in edges:
for scen in e.scenarios.all(): for scen in e.scenarios.all():
for ai in scen.additional_information.keys(): for ai in scen.get_additional_information():
if ai == "Enzyme": if ai.type == "Enzyme":
res.extend(scen.additional_information[ai]) res.append(ai.get())
return res return res
class Pathway(EnviPathModel, AliasMixin, ScenarioMixin): class Pathway(EnviPathModel, AliasMixin, ScenarioMixin, AdditionalInformationMixin):
package = models.ForeignKey( package = models.ForeignKey(
s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True
) )
@ -1842,7 +1876,7 @@ class Pathway(EnviPathModel, AliasMixin, ScenarioMixin):
queue.append(n) queue.append(n)
# Add unconnected nodes # Add unconnected nodes
for n in self.nodes: for n in self.nodes.order_by("url"):
if len(n.out_edges.all()) == 0: if len(n.out_edges.all()) == 0:
if n not in queue: if n not in queue:
queue.append(n) queue.append(n)
@ -1852,8 +1886,8 @@ class Pathway(EnviPathModel, AliasMixin, ScenarioMixin):
processed.add(current) processed.add(current)
nodes.append(current.d3_json()) nodes.append(current.d3_json())
for e in self.edges.filter(start_nodes=current).distinct(): for e in self.edges.filter(start_nodes=current).order_by("url").distinct():
for prod in e.end_nodes.all(): for prod in e.end_nodes.all().order_by("url"):
if prod not in queue and prod not in processed: if prod not in queue and prod not in processed:
queue.append(prod) queue.append(prod)
@ -2011,19 +2045,23 @@ class Pathway(EnviPathModel, AliasMixin, ScenarioMixin):
# Clean for potential XSS # Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip() name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
is_generic_name = False
if name is None or name == "": if name is None or name == "":
name = f"Pathway {Pathway.objects.filter(package=package).count() + 1}" name = f"Pathway {Pathway.objects.filter(package=package).count() + 1}"
is_generic_name = True
pw.name = name pw.name = name
if description is not None and description.strip() != "": if description is not None and description.strip() != "":
pw.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip() pw.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()
pw.predicted = predicted pw.predicted = predicted
pw.save() pw.save()
try: try:
# create root node # create root node
Node.create(pw, smiles, 0) Node.create(pw, smiles, 0, name=name if not is_generic_name else None)
except ValueError as e: except ValueError as e:
# Node creation failed, most likely due to an invalid smiles # Node creation failed, most likely due to an invalid smiles
# delete this pathway... # delete this pathway...
@ -2136,7 +2174,7 @@ class Pathway(EnviPathModel, AliasMixin, ScenarioMixin):
return Edge.create(self, start_nodes, end_nodes, rule, name=name, description=description) return Edge.create(self, start_nodes, end_nodes, rule, name=name, description=description)
class Node(EnviPathModel, AliasMixin, ScenarioMixin): class Node(EnviPathModel, AliasMixin, ScenarioMixin, AdditionalInformationMixin):
pathway = models.ForeignKey( pathway = models.ForeignKey(
"epdb.Pathway", verbose_name="belongs to", on_delete=models.CASCADE, db_index=True "epdb.Pathway", verbose_name="belongs to", on_delete=models.CASCADE, db_index=True
) )
@ -2171,6 +2209,11 @@ class Node(EnviPathModel, AliasMixin, ScenarioMixin):
def d3_json(self): def d3_json(self):
app_domain_data = self.get_app_domain_assessment_data() app_domain_data = self.get_app_domain_assessment_data()
predicted_properties = defaultdict(list)
for ai in self.additional_information.all():
if isinstance(ai.get(), PredictedProperty):
predicted_properties[ai.get().__class__.__name__].append(ai.data)
return { return {
"depth": self.depth, "depth": self.depth,
"stereo_removed": self.stereo_removed, "stereo_removed": self.stereo_removed,
@ -2189,6 +2232,7 @@ class Node(EnviPathModel, AliasMixin, ScenarioMixin):
else None, else None,
"uncovered_functional_groups": False, "uncovered_functional_groups": False,
}, },
"predicted_properties": predicted_properties,
"is_engineered_intermediate": self.kv.get("is_engineered_intermediate", False), "is_engineered_intermediate": self.kv.get("is_engineered_intermediate", False),
"timeseries": self.get_timeseries_data(), "timeseries": self.get_timeseries_data(),
} }
@ -2206,6 +2250,7 @@ class Node(EnviPathModel, AliasMixin, ScenarioMixin):
if pathway.predicted and FormatConverter.has_stereo(smiles): if pathway.predicted and FormatConverter.has_stereo(smiles):
smiles = FormatConverter.standardize(smiles, remove_stereo=True) smiles = FormatConverter.standardize(smiles, remove_stereo=True)
stereo_removed = True stereo_removed = True
c = Compound.create(pathway.package, smiles, name=name, description=description) c = Compound.create(pathway.package, smiles, name=name, description=description)
if Node.objects.filter(pathway=pathway, default_node_label=c.default_structure).exists(): if Node.objects.filter(pathway=pathway, default_node_label=c.default_structure).exists():
@ -2229,10 +2274,10 @@ class Node(EnviPathModel, AliasMixin, ScenarioMixin):
return IndigoUtils.mol_to_svg(self.default_node_label.smiles) return IndigoUtils.mol_to_svg(self.default_node_label.smiles)
def get_timeseries_data(self): def get_timeseries_data(self):
for scenario in self.scenarios.all(): for ai in self.additional_information.all():
for ai in scenario.get_additional_information():
if ai.__class__.__name__ == "OECD301FTimeSeries": if ai.__class__.__name__ == "OECD301FTimeSeries":
return ai.model_dump(mode="json") return ai.model_dump(mode="json")
return None return None
def get_app_domain_assessment_data(self): def get_app_domain_assessment_data(self):
@ -2263,7 +2308,7 @@ class Node(EnviPathModel, AliasMixin, ScenarioMixin):
return res return res
class Edge(EnviPathModel, AliasMixin, ScenarioMixin): class Edge(EnviPathModel, AliasMixin, ScenarioMixin, AdditionalInformationMixin):
pathway = models.ForeignKey( pathway = models.ForeignKey(
"epdb.Pathway", verbose_name="belongs to", on_delete=models.CASCADE, db_index=True "epdb.Pathway", verbose_name="belongs to", on_delete=models.CASCADE, db_index=True
) )
@ -2299,7 +2344,10 @@ class Edge(EnviPathModel, AliasMixin, ScenarioMixin):
"reaction_probability": self.kv.get("probability"), "reaction_probability": self.kv.get("probability"),
"start_node_urls": [x.url for x in self.start_nodes.all()], "start_node_urls": [x.url for x in self.start_nodes.all()],
"end_node_urls": [x.url for x in self.end_nodes.all()], "end_node_urls": [x.url for x in self.end_nodes.all()],
"scenarios": [{"name": s.get_name(), "url": s.url} for s in self.scenarios.all()], "scenarios": [
{"name": s.get_name(), "url": s.url, "review_status": s.package.reviewed}
for s in self.scenarios.all()
],
} }
for n in self.start_nodes.all(): for n in self.start_nodes.all():
@ -2405,38 +2453,11 @@ class Edge(EnviPathModel, AliasMixin, ScenarioMixin):
) )
class EPModel(PolymorphicModel, EnviPathModel): class EPModel(PolymorphicModel, EnviPathModel, AdditionalInformationMixin):
package = models.ForeignKey( package = models.ForeignKey(
s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True
) )
def _url(self):
return "{}/model/{}".format(self.package.url, self.uuid)
class PackageBasedModel(EPModel):
rule_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Rule Packages",
related_name="%(app_label)s_%(class)s_rule_packages",
)
data_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Data Packages",
related_name="%(app_label)s_%(class)s_data_packages",
)
eval_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Evaluation Packages",
related_name="%(app_label)s_%(class)s_eval_packages",
)
threshold = models.FloatField(null=False, blank=False, default=0.5)
eval_results = JSONField(null=True, blank=True, default=dict)
app_domain = models.ForeignKey(
"epdb.ApplicabilityDomain", on_delete=models.SET_NULL, null=True, blank=True, default=None
)
multigen_eval = models.BooleanField(null=False, blank=False, default=False)
INITIAL = "INITIAL" INITIAL = "INITIAL"
INITIALIZING = "INITIALIZING" INITIALIZING = "INITIALIZING"
BUILDING = "BUILDING" BUILDING = "BUILDING"
@ -2463,6 +2484,35 @@ class PackageBasedModel(EPModel):
def ready_for_prediction(self) -> bool: def ready_for_prediction(self) -> bool:
return self.model_status in [self.BUILT_NOT_EVALUATED, self.EVALUATING, self.FINISHED] return self.model_status in [self.BUILT_NOT_EVALUATED, self.EVALUATING, self.FINISHED]
def _url(self):
return "{}/model/{}".format(self.package.url, self.uuid)
class PackageBasedModel(EPModel):
rule_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Rule Packages",
related_name="%(app_label)s_%(class)s_rule_packages",
blank=True,
)
data_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Data Packages",
related_name="%(app_label)s_%(class)s_data_packages",
)
eval_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Evaluation Packages",
related_name="%(app_label)s_%(class)s_eval_packages",
blank=True,
)
threshold = models.FloatField(null=False, blank=False, default=0.5)
eval_results = JSONField(null=True, blank=True, default=dict)
app_domain = models.ForeignKey(
"epdb.ApplicabilityDomain", on_delete=models.SET_NULL, null=True, blank=True, default=None
)
multigen_eval = models.BooleanField(null=False, blank=False, default=False)
@property @property
def pr_curve(self): def pr_curve(self):
if self.model_status != self.FINISHED: if self.model_status != self.FINISHED:
@ -3007,7 +3057,7 @@ class RuleBasedRelativeReasoning(PackageBasedModel):
mod = joblib.load(os.path.join(s.MODEL_DIR, f"{self.uuid}_mod.pkl")) mod = joblib.load(os.path.join(s.MODEL_DIR, f"{self.uuid}_mod.pkl"))
return mod return mod
def predict(self, smiles) -> List["PredictionResult"]: def predict(self, smiles, *args, **kwargs) -> List["PredictionResult"]:
start = datetime.now() start = datetime.now()
ds = self.load_dataset() ds = self.load_dataset()
classify_ds, classify_prods = ds.classification_dataset([smiles], self.applicable_rules) classify_ds, classify_prods = ds.classification_dataset([smiles], self.applicable_rules)
@ -3107,7 +3157,7 @@ class MLRelativeReasoning(PackageBasedModel):
mod.base_clf.n_jobs = -1 mod.base_clf.n_jobs = -1
return mod return mod
def predict(self, smiles) -> List["PredictionResult"]: def predict(self, smiles, *args, **kwargs) -> List["PredictionResult"]:
start = datetime.now() start = datetime.now()
ds = self.load_dataset() ds = self.load_dataset()
classify_ds, classify_prods = ds.classification_dataset([smiles], self.applicable_rules) classify_ds, classify_prods = ds.classification_dataset([smiles], self.applicable_rules)
@ -3415,16 +3465,14 @@ class EnviFormer(PackageBasedModel):
mod = load(device=s.ENVIFORMER_DEVICE, ckpt_path=ckpt) mod = load(device=s.ENVIFORMER_DEVICE, ckpt_path=ckpt)
return mod return mod
def predict(self, smiles) -> List["PredictionResult"]: def predict(self, smiles, *args, **kwargs) -> List["PredictionResult"]:
return self.predict_batch([smiles])[0] return self.predict_batch([smiles])[0]
def predict_batch(self, smiles_list): def predict_batch(self, smiles: List[str], *args, **kwargs):
# Standardizer removes all but one compound from a raw SMILES string, so they need to be processed separately # Standardizer removes all but one compound from a raw SMILES string, so they need to be processed separately
canon_smiles = [ canon_smiles = [
".".join( ".".join([FormatConverter.standardize(s, remove_stereo=True) for s in smi.split(".")])
[FormatConverter.standardize(s, remove_stereo=True) for s in smiles.split(".")] for smi in smiles
)
for smiles in smiles_list
] ]
logger.info(f"Submitting {canon_smiles} to {self.get_name()}") logger.info(f"Submitting {canon_smiles} to {self.get_name()}")
start = datetime.now() start = datetime.now()
@ -3445,10 +3493,17 @@ class EnviFormer(PackageBasedModel):
for smile in smi.split(".") for smile in smi.split(".")
] ]
) )
if smi in canon_smiles:
logger.debug(f"Found input SMILES={smi} in prediction results. Skipping...")
continue
except ValueError: # This occurs when the predicted string is an invalid SMILES except ValueError: # This occurs when the predicted string is an invalid SMILES
logging.info(f"EnviFormer predicted an invalid SMILES: {smi}") logging.info(f"EnviFormer predicted an invalid SMILES: {smi}")
continue continue
res.append(PredictionResult([ProductSet([smi])], prob, None)) res.append(PredictionResult([ProductSet([smi])], prob, None))
results.append(res) results.append(res)
return results return results
@ -3587,7 +3642,7 @@ class EnviFormer(PackageBasedModel):
) )
root_node = ".".join( root_node = ".".join(
[ [
FormatConverter.standardize(smile) FormatConverter.standardize(smile, remove_stereo=True)
for smile in root_node[0].default_node_label.smiles.split(".") for smile in root_node[0].default_node_label.smiles.split(".")
] ]
) )
@ -3766,8 +3821,216 @@ class EnviFormer(PackageBasedModel):
return [] return []
class PluginModel(EPModel): class PropertyPluginModel(PackageBasedModel):
pass plugin_identifier = models.CharField(max_length=255)
rule_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Rule Packages",
related_name="%(app_label)s_%(class)s_rule_packages",
blank=True,
)
data_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Data Packages",
related_name="%(app_label)s_%(class)s_data_packages",
blank=True,
)
eval_packages = models.ManyToManyField(
s.EPDB_PACKAGE_MODEL,
verbose_name="Evaluation Packages",
related_name="%(app_label)s_%(class)s_eval_packages",
blank=True,
)
@staticmethod
@transaction.atomic
def create(
package: "Package",
plugin_identifier: str,
rule_packages: List["Package"] | None,
data_packages: List["Package"] | None,
name: "str" = None,
description: str = None,
):
mod = PropertyPluginModel()
mod.package = package
# Clean for potential XSS
if name is not None:
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"PropertyPluginModel {PropertyPluginModel.objects.filter(package=package).count() + 1}"
mod.name = name
if description is not None and description.strip() != "":
mod.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()
if plugin_identifier is None:
raise ValueError("Plugin identifier must be set")
impl = s.PROPERTY_PLUGINS.get(plugin_identifier, None)
if impl is None:
raise ValueError(f"Unknown plugin identifier: {plugin_identifier}")
inst = impl()
mod.plugin_identifier = plugin_identifier
if inst.requires_rule_packages() and (rule_packages is None or len(rule_packages) == 0):
raise ValueError("Plugin requires rules but none were provided")
elif not inst.requires_rule_packages() and (
rule_packages is not None and len(rule_packages) > 0
):
raise ValueError("Plugin does not require rules but some were provided")
if rule_packages is None:
rule_packages = []
if inst.requires_data_packages() and (data_packages is None or len(data_packages) == 0):
raise ValueError("Plugin requires data but none were provided")
elif not inst.requires_data_packages() and (
data_packages is not None and len(data_packages) > 0
):
raise ValueError("Plugin does not require data but some were provided")
if data_packages is None:
data_packages = []
mod.save()
for p in rule_packages:
mod.rule_packages.add(p)
for p in data_packages:
mod.data_packages.add(p)
mod.save()
return mod
def instance(self) -> "Property":
"""
Returns an instance of the plugin implementation.
This method retrieves the implementation of the plugin identified by
`self.plugin_identifier` from the `PROPERTY_PLUGINS` mapping, then
instantiates and returns it.
Returns:
object: An instance of the plugin implementation.
"""
impl = s.PROPERTY_PLUGINS[self.plugin_identifier]
instance = impl()
return instance
def build_dataset(self):
"""
Required by general model contract but actual implementation resides in plugin.
"""
return
def build_model(self):
from bridge.dto import BaseDTO
self.model_status = self.BUILDING
self.save()
compounds = CompoundStructure.objects.filter(compound__package__in=self.data_packages.all())
reactions = Reaction.objects.filter(package__in=self.data_packages.all())
rules = Rule.objects.filter(package__in=self.rule_packages.all())
eP = BaseDTO(str(self.uuid), self.url, s.MODEL_DIR, compounds, reactions, rules)
instance = self.instance()
_ = instance.build(eP)
self.model_status = self.BUILT_NOT_EVALUATED
self.save()
def predict(self, smiles, *args, **kwargs) -> RunResult:
return self.predict_batch([smiles], *args, **kwargs)
def predict_batch(self, smiles: List[str], *args, **kwargs) -> RunResult:
from bridge.dto import BaseDTO, CompoundProto
from dataclasses import dataclass
@dataclass(frozen=True, slots=True)
class TempCompound(CompoundProto):
url = None
name = None
smiles: str
batch = [TempCompound(smiles=smi) for smi in smiles]
reactions = Reaction.objects.filter(package__in=self.data_packages.all())
rules = Rule.objects.filter(package__in=self.rule_packages.all())
eP = BaseDTO(str(self.uuid), self.url, s.MODEL_DIR, batch, reactions, rules)
instance = self.instance()
return instance.run(eP, *args, **kwargs)
def evaluate_model(self, multigen: bool, eval_packages: List["Package"] = None, **kwargs):
    """Evaluate this plugin model.

    If evaluation packages are attached (previously or via
    ``eval_packages``), the already-built model is evaluated against the
    data in those packages. Otherwise the plugin performs a
    build-and-evaluate run on the model's own data packages and the
    per-fold metrics are averaged.

    Args:
        multigen: Flag forwarded by callers; not used in this method.
        eval_packages: Optional packages to attach as evaluation data.
        **kwargs: Passed through to the plugin's ``evaluate`` call.

    Returns:
        The plugin's evaluation result, or None if evaluation failed
        (status is then set to ERROR and the exception is logged).

    Raises:
        ValueError: If the model has not been built yet.
    """
    from bridge.dto import BaseDTO

    if self.model_status != self.BUILT_NOT_EVALUATED:
        raise ValueError("Model must be built before evaluation")

    self.model_status = self.EVALUATING
    self.save()

    if eval_packages is not None:
        for p in eval_packages:
            self.eval_packages.add(p)

    rules = Rule.objects.filter(package__in=self.rule_packages.all())

    has_eval_packages = self.eval_packages.count() > 0
    # BUGFIX: the two branches below were swapped — with eval packages
    # present, data was queried from data_packages, and the fallback
    # queried the guaranteed-empty eval_packages.
    if has_eval_packages:
        reactions = Reaction.objects.filter(package__in=self.eval_packages.all())
        compounds = CompoundStructure.objects.filter(
            compound__package__in=self.eval_packages.all()
        )
    else:
        reactions = Reaction.objects.filter(package__in=self.data_packages.all())
        compounds = CompoundStructure.objects.filter(
            compound__package__in=self.data_packages.all()
        )

    eP = BaseDTO(str(self.uuid), self.url, s.MODEL_DIR, compounds, reactions, rules)
    instance = self.instance()

    # Ensure `res` is defined even if evaluation raises; previously the
    # final `return res` raised NameError on failure.
    res = None
    try:
        if has_eval_packages:
            res = instance.evaluate(eP, **kwargs)
            self.eval_results = res.data
        else:
            res = instance.build_and_evaluate(eP)
            self.eval_results = self.compute_averages(res.data)
        self.model_status = self.FINISHED
        self.save()
    except Exception as e:
        logger.error(f"Error during evaluation: {type(e).__name__}, {e}")
        self.model_status = self.ERROR
        self.save()
    return res
@staticmethod
def compute_averages(data):
sum_dict = {}
for result in data:
for key, value in result.items():
sum_dict.setdefault(key, []).append(value)
sum_dict = {k: sum(v) / len(data) for k, v in sum_dict.items()}
return sum_dict
class Scenario(EnviPathModel): class Scenario(EnviPathModel):
@ -3779,11 +4042,6 @@ class Scenario(EnviPathModel):
max_length=256, null=False, blank=False, default="Not specified" max_length=256, null=False, blank=False, default="Not specified"
) )
# for Referring Scenarios this property will be filled
parent = models.ForeignKey("self", on_delete=models.CASCADE, default=None, null=True)
additional_information = models.JSONField(verbose_name="Additional Information")
def _url(self): def _url(self):
return "{}/scenario/{}".format(self.package.url, self.uuid) return "{}/scenario/{}".format(self.package.url, self.uuid)
@ -3799,11 +4057,14 @@ class Scenario(EnviPathModel):
): ):
new_s = Scenario() new_s = Scenario()
new_s.package = package new_s.package = package
if name is not None: if name is not None:
# Clean for potential XSS # Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip() name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "": if name is None or name == "":
name = f"Scenario {Scenario.objects.filter(package=package).count() + 1}" name = f"Scenario {Scenario.objects.filter(package=package).count() + 1}"
new_s.name = name new_s.name = name
if description is not None and description.strip() != "": if description is not None and description.strip() != "":
@ -3815,19 +4076,14 @@ class Scenario(EnviPathModel):
if scenario_type is not None and scenario_type.strip() != "": if scenario_type is not None and scenario_type.strip() != "":
new_s.scenario_type = scenario_type new_s.scenario_type = scenario_type
add_inf = defaultdict(list) # TODO Remove
new_s.additional_information = {}
for info in additional_information:
cls_name = info.__class__.__name__
# Clean for potential XSS hidden in the additional information fields.
ai_data = json.loads(nh3.clean(info.model_dump_json()).strip())
ai_data["uuid"] = f"{uuid4()}"
add_inf[cls_name].append(ai_data)
new_s.additional_information = add_inf
new_s.save() new_s.save()
for ai in additional_information:
AdditionalInformation.create(package, ai, scenario=new_s)
return new_s return new_s
@transaction.atomic @transaction.atomic
@ -3841,19 +4097,9 @@ class Scenario(EnviPathModel):
Returns: Returns:
str: UUID of the created item str: UUID of the created item
""" """
cls_name = data.__class__.__name__ ai = AdditionalInformation.create(self.package, ai=data, scenario=self)
# Clean for potential XSS hidden in the additional information fields.
ai_data = json.loads(nh3.clean(data.model_dump_json()).strip())
generated_uuid = str(uuid4())
ai_data["uuid"] = generated_uuid
if cls_name not in self.additional_information: return str(ai.uuid)
self.additional_information[cls_name] = []
self.additional_information[cls_name].append(ai_data)
self.save()
return generated_uuid
@transaction.atomic @transaction.atomic
def update_additional_information(self, ai_uuid: str, data: "EnviPyModel") -> None: def update_additional_information(self, ai_uuid: str, data: "EnviPyModel") -> None:
@ -3867,110 +4113,158 @@ class Scenario(EnviPathModel):
Raises: Raises:
ValueError: If item with given UUID not found or type mismatch ValueError: If item with given UUID not found or type mismatch
""" """
found_type = None ai = AdditionalInformation.objects.filter(uuid=ai_uuid, scenario=self)
found_idx = -1
# Find the item by UUID
for type_name, items in self.additional_information.items():
for idx, item_data in enumerate(items):
if item_data.get("uuid") == ai_uuid:
found_type = type_name
found_idx = idx
break
if found_type:
break
if found_type is None:
raise ValueError(f"Additional information with UUID {ai_uuid} not found")
if ai.exists() and ai.count() == 1:
ai = ai.first()
# Verify the model type matches (prevent type changes) # Verify the model type matches (prevent type changes)
new_type = data.__class__.__name__ new_type = data.__class__.__name__
if new_type != found_type: if new_type != ai.type:
raise ValueError( raise ValueError(
f"Cannot change type from {found_type} to {new_type}. " f"Cannot change type from {ai.type} to {new_type}. "
f"Delete and create a new item instead." f"Delete and create a new item instead."
) )
# Update the item data, preserving UUID ai.data = data.__class__(
ai_data = json.loads(nh3.clean(data.model_dump_json()).strip()) **json.loads(nh3.clean(data.model_dump_json()).strip())
ai_data["uuid"] = ai_uuid ).model_dump(mode="json")
ai.save()
self.additional_information[found_type][found_idx] = ai_data else:
self.save() raise ValueError(f"Additional information with UUID {ai_uuid} not found")
@transaction.atomic @transaction.atomic
def remove_additional_information(self, ai_uuid): def remove_additional_information(self, ai_uuid):
found_type = None ai = AdditionalInformation.objects.filter(uuid=ai_uuid, scenario=self)
found_idx = -1
for k, vals in self.additional_information.items(): if ai.exists() and ai.count() == 1:
for i, v in enumerate(vals): ai.delete()
if v["uuid"] == ai_uuid:
found_type = k
found_idx = i
break
if found_type is not None and found_idx >= 0:
if len(self.additional_information[found_type]) == 1:
del self.additional_information[found_type]
else:
self.additional_information[found_type].pop(found_idx)
self.save()
else: else:
raise ValueError(f"Could not find additional information with uuid {ai_uuid}") raise ValueError(f"Could not find additional information with uuid {ai_uuid}")
@transaction.atomic @transaction.atomic
def set_additional_information(self, data: Dict[str, "EnviPyModel"]): def set_additional_information(self, data: Dict[str, "EnviPyModel"]):
new_ais = defaultdict(list) raise NotImplementedError("Not implemented yet")
for k, vals in data.items():
for v in vals: def get_additional_information(self, direct_only=True):
# Clean for potential XSS hidden in the additional information fields. ais = AdditionalInformation.objects.filter(scenario=self)
ai_data = json.loads(nh3.clean(v.model_dump_json()).strip())
if hasattr(v, "uuid"): if direct_only:
ai_data["uuid"] = str(v.uuid) return ais.filter(object_id__isnull=True)
else: else:
ai_data["uuid"] = str(uuid4()) return ais
new_ais[k].append(ai_data)
self.additional_information = new_ais
self.save()
def get_additional_information(self):
from envipy_additional_information import registry
for k, vals in self.additional_information.items():
if k == "enzyme":
continue
for v in vals:
# Per default additional fields are ignored
MAPPING = {c.__name__: c for c in registry.list_models().values()}
try:
inst = MAPPING[k](**v)
except Exception as e:
logger.error(f"Could not load additional information {k}: {e}")
if s.SENTRY_ENABLED:
from sentry_sdk import capture_exception
capture_exception(e)
# Add uuid to uniquely identify objects for manipulation
if "uuid" in v:
inst.__dict__["uuid"] = v["uuid"]
yield inst
def related_pathways(self): def related_pathways(self):
scens = [self]
if self.parent is not None:
scens.append(self.parent)
return Pathway.objects.filter( return Pathway.objects.filter(
scenarios__in=scens, package__reviewed=True, package=self.package scenarios=self, package__reviewed=True, package=self.package
).distinct() ).distinct()
class AdditionalInformation(models.Model):
    """Typed additional-information record (a serialized pydantic model).

    Each row stores one pydantic model instance (class name in ``type``,
    JSON payload in ``data``). It can be attached either to a Scenario or,
    via a generic foreign key, to an arbitrary target object
    (Compound/Reaction/Pathway/Node/...). DB constraints enforce that the
    generic FK pair is complete-or-empty and that a record is never
    "floating" (neither scenario nor target set).
    """

    package = models.ForeignKey(
        s.EPDB_PACKAGE_MODEL, verbose_name="Package", on_delete=models.CASCADE, db_index=True
    )
    uuid = models.UUIDField(unique=True, default=uuid4, editable=False)
    url = models.TextField(blank=False, null=True, verbose_name="URL", unique=True)
    kv = JSONField(null=True, blank=True, default=dict)

    # class name of pydantic model
    type = models.TextField(blank=False, null=False, verbose_name="Additional Information Type")

    # serialized pydantic model
    data = models.JSONField(null=True, blank=True, default=dict)

    # The link to scenario is optional - e.g. when setting predicted properties to objects
    scenario = models.ForeignKey(
        "epdb.Scenario",
        null=True,
        blank=True,
        on_delete=models.CASCADE,
        related_name="scenario_additional_information",
    )

    # Generic target (Compound/Reaction/Pathway/...)
    content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE)
    object_id = models.PositiveBigIntegerField(null=True, blank=True)
    content_object = GenericForeignKey("content_type", "object_id")

    @staticmethod
    def create(
        package: "Package",
        ai: "EnviPyModel",
        scenario=None,
        content_object=None,
        skip_cleaning=False,
    ):
        """Validate, sanitize and persist a pydantic additional-information model.

        Args:
            package: Owning package.
            ai: The pydantic model instance to store.
            scenario: Optional Scenario to attach to.
            content_object: Optional generic target object to attach to.
            skip_cleaning: If True, skip the nh3 HTML sanitization step
                (the payload is still re-validated through the model class).
                Useful for trusted, machine-generated payloads.

        Returns:
            AdditionalInformation: The saved record.
        """
        add_inf = AdditionalInformation()
        add_inf.package = package
        add_inf.type = ai.__class__.__name__
        # dump, sanitize (unless explicitly skipped), validate before saving
        # BUGFIX: skip_cleaning was previously accepted but ignored.
        if skip_cleaning:
            _ai = ai.__class__(**json.loads(ai.model_dump_json()))
        else:
            _ai = ai.__class__(**json.loads(nh3.clean(ai.model_dump_json()).strip()))
        add_inf.data = _ai.model_dump(mode="json")
        if scenario is not None:
            add_inf.scenario = scenario
        if content_object is not None:
            add_inf.content_object = content_object
        add_inf.save()
        return add_inf

    def save(self, *args, **kwargs):
        # Lazily derive the URL on first save, once scenario/target are set.
        if not self.url:
            self.url = self._url()
        super().save(*args, **kwargs)

    def _url(self):
        # Prefer the generic target's URL; fall back to the scenario's.
        if self.content_object is not None:
            return f"{self.content_object.url}/additional-information/{self.uuid}"
        return f"{self.scenario.url}/additional-information/{self.uuid}"

    def get(self) -> "EnviPyModel":
        """Deserialize ``data`` back into its registered pydantic model.

        Returns:
            EnviPyModel: The reconstructed instance, with this record's
            uuid exposed on the instance for later manipulation.

        Raises:
            Exception: Whatever the pydantic constructor raised; the error
            is logged first.
        """
        from envipy_additional_information import registry

        MAPPING = {c.__name__: c for c in registry.list_models().values()}
        try:
            inst = MAPPING[self.type](**self.data)
        except Exception as e:
            # BUGFIX: log via the module logger instead of print so
            # failures show up in application logs; bare raise keeps the
            # original traceback.
            logger.error(f"Error loading {self.type}: {e}")
            raise
        # Expose the DB uuid so callers can identify this record for
        # update/delete operations.
        inst.__dict__["uuid"] = str(self.uuid)
        return inst

    def __str__(self) -> str:
        return f"{self.type} ({self.uuid})"

    class Meta:
        indexes = [
            models.Index(fields=["type"]),
            models.Index(fields=["scenario", "type"]),
            models.Index(fields=["content_type", "object_id"]),
            models.Index(fields=["scenario", "content_type", "object_id"]),
        ]
        constraints = [
            # Generic FK must be complete or empty
            models.CheckConstraint(
                name="ck_addinfo_gfk_pair",
                check=(
                    (Q(content_type__isnull=True) & Q(object_id__isnull=True))
                    | (Q(content_type__isnull=False) & Q(object_id__isnull=False))
                ),
            ),
            # Disallow "floating" info
            models.CheckConstraint(
                name="ck_addinfo_not_both_null",
                check=Q(scenario__isnull=False) | Q(content_type__isnull=False),
            ),
        ]
class UserSettingPermission(Permission): class UserSettingPermission(Permission):
uuid = models.UUIDField( uuid = models.UUIDField(
null=False, blank=False, verbose_name="UUID of this object", primary_key=True, default=uuid4 null=False, blank=False, verbose_name="UUID of this object", primary_key=True, default=uuid4
@ -4017,6 +4311,13 @@ class Setting(EnviPathModel):
null=True, blank=True, verbose_name="Setting Model Threshold", default=0.25 null=True, blank=True, verbose_name="Setting Model Threshold", default=0.25
) )
property_models = models.ManyToManyField(
"PropertyPluginModel",
verbose_name="Setting Property Models",
related_name="settings",
blank=True,
)
expansion_scheme = models.CharField( expansion_scheme = models.CharField(
max_length=20, max_length=20,
choices=ExpansionSchemeChoice.choices, choices=ExpansionSchemeChoice.choices,

View File

@ -11,7 +11,17 @@ from django.core.mail import EmailMultiAlternatives
from django.utils import timezone from django.utils import timezone
from epdb.logic import SPathway from epdb.logic import SPathway
from epdb.models import Edge, EPModel, JobLog, Node, Pathway, Rule, Setting, User from epdb.models import (
AdditionalInformation,
Edge,
EPModel,
JobLog,
Node,
Pathway,
Rule,
Setting,
User,
)
from utilities.chem import FormatConverter from utilities.chem import FormatConverter
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -66,9 +76,9 @@ def mul(a, b):
@shared_task(queue="predict") @shared_task(queue="predict")
def predict_simple(model_pk: int, smiles: str): def predict_simple(model_pk: int, smiles: str, *args, **kwargs):
mod = get_ml_model(model_pk) mod = get_ml_model(model_pk)
res = mod.predict(smiles) res = mod.predict(smiles, *args, **kwargs)
return res return res
@ -229,9 +239,28 @@ def predict(
if JobLog.objects.filter(task_id=self.request.id).exists(): if JobLog.objects.filter(task_id=self.request.id).exists():
JobLog.objects.filter(task_id=self.request.id).update(status="SUCCESS", task_result=pw.url) JobLog.objects.filter(task_id=self.request.id).update(status="SUCCESS", task_result=pw.url)
# dispatch property job
compute_properties.delay(pw_pk, pred_setting_pk)
return pw.url return pw.url
@shared_task(bind=True, queue="background")
def compute_properties(self, pathway_pk: int, setting_pk: int):
    """Background task: predict properties for every node of a pathway.

    For each property model configured on the given Setting, runs a batch
    prediction over the SMILES of all pathway nodes and attaches each
    prediction to its node as an AdditionalInformation record.

    Args:
        pathway_pk: Primary key of the Pathway to annotate.
        setting_pk: Primary key of the Setting providing property models.
    """
    pw = Pathway.objects.get(id=pathway_pk)
    setting = Setting.objects.get(id=setting_pk)
    # Materialize node order once: it fixes the index mapping between the
    # SMILES batch and the per-node prediction results below.
    nodes = [n for n in pw.nodes]
    smiles = [n.default_node_label.smiles for n in nodes]
    for prop_mod in setting.property_models.all():
        # Only "heavy" plugins run in this background task; presumably
        # lightweight ones are computed synchronously elsewhere — TODO confirm.
        if prop_mod.instance().is_heavy():
            rr = prop_mod.predict_batch(smiles)
            for idx, pred in enumerate(rr.result):
                n = nodes[idx]
                _ = AdditionalInformation.create(pw.package, ai=pred, content_object=n)
@shared_task(bind=True, queue="background") @shared_task(bind=True, queue="background")
def identify_missing_rules( def identify_missing_rules(
self, self,
@ -420,7 +449,7 @@ def batch_predict(
standardized_substrates_and_smiles = [] standardized_substrates_and_smiles = []
for substrate in substrate_and_names: for substrate in substrate_and_names:
try: try:
stand_smiles = FormatConverter.standardize(substrate[0]) stand_smiles = FormatConverter.standardize(substrate[0], remove_stereo=True)
standardized_substrates_and_smiles.append([stand_smiles, substrate[1]]) standardized_substrates_and_smiles.append([stand_smiles, substrate[1]])
except ValueError: except ValueError:
raise ValueError( raise ValueError(

View File

@ -1,12 +1,14 @@
import json import json
import logging import logging
from datetime import datetime from datetime import datetime
from typing import Any, Dict, List from typing import Any, Dict, List, Iterable
import nh3 import nh3
from django.conf import settings as s from django.conf import settings as s
from django.contrib.auth import get_user_model from django.contrib.auth import get_user_model
from django.core.exceptions import BadRequest, PermissionDenied from django.contrib.auth.validators import UnicodeUsernameValidator
from django.core.exceptions import BadRequest, PermissionDenied, ValidationError
from django.core.validators import validate_email
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, JsonResponse from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, JsonResponse
from django.shortcuts import get_object_or_404, redirect, render from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse from django.urls import reverse
@ -26,6 +28,7 @@ from .logic import (
UserManager, UserManager,
) )
from .models import ( from .models import (
AdditionalInformation,
APIToken, APIToken,
Compound, Compound,
CompoundStructure, CompoundStructure,
@ -44,6 +47,7 @@ from .models import (
Node, Node,
Pathway, Pathway,
Permission, Permission,
PropertyPluginModel,
Reaction, Reaction,
Rule, Rule,
RuleBasedRelativeReasoning, RuleBasedRelativeReasoning,
@ -159,15 +163,28 @@ def login(request):
password = request.POST.get("password") password = request.POST.get("password")
# Get email for username and check if the account is active # Get email for username and check if the account is active
try:
# Try username and if it fails check if username is a valid email adress and we'll find a user
try: try:
temp_user = get_user_model().objects.get(username=username) temp_user = get_user_model().objects.get(username=username)
except get_user_model().DoesNotExist as e:
# validate_email returns None if input is valid -> check for None
# Otherwise a ValidationError is raised
if validate_email(username) is None:
temp_user = get_user_model().objects.get(email=username)
else:
raise e
if not temp_user.is_active: if not temp_user.is_active:
context["message"] = "User account is not activated yet!" context["message"] = "User account is not activated yet!"
return render(request, "static/login.html", context) return render(request, "static/login.html", context)
email = temp_user.email email = temp_user.email
except get_user_model().DoesNotExist: except (get_user_model().DoesNotExist, ValidationError):
context["message"] = "Login failed!"
return render(request, "static/login.html", context)
except Exception as e:
logger.info(f"Uncaught exception while trying to login: {e}")
context["message"] = "Login failed!" context["message"] = "Login failed!"
return render(request, "static/login.html", context) return render(request, "static/login.html", context)
@ -230,6 +247,15 @@ def register(request):
context["message"] = "Invalid username/email/password" context["message"] = "Invalid username/email/password"
return render(request, "static/login.html", context) return render(request, "static/login.html", context)
try:
UnicodeUsernameValidator()(username)
except ValidationError:
context["message"] = (
"Enter a valid username. This value may contain only letters, "
"numbers, and @/./+/-/_ characters."
)
return render(request, "static/login.html", context)
if password != rpassword or password == "": if password != rpassword or password == "":
context["message"] = "Registration failed, provided passwords differ!" context["message"] = "Registration failed, provided passwords differ!"
return render(request, "static/login.html", context) return render(request, "static/login.html", context)
@ -377,7 +403,7 @@ def breadcrumbs(
def set_scenarios(current_user, attach_object, scenario_urls: List[str]): def set_scenarios(current_user, attach_object, scenario_urls: List[str]):
scens = [] scens = []
for scenario_url in scenario_urls: for scenario_url in scenario_urls:
# As empty lists will be removed in POST request well send [''] # As empty lists will be removed in POST request we'll send ['']
if scenario_url == "": if scenario_url == "":
continue continue
@ -389,6 +415,7 @@ def set_scenarios(current_user, attach_object, scenario_urls: List[str]):
def set_aliases(current_user, attach_object, aliases: List[str]): def set_aliases(current_user, attach_object, aliases: List[str]):
# As empty lists will be removed in POST request we'll send ['']
if aliases == [""]: if aliases == [""]:
aliases = [] aliases = []
@ -397,7 +424,7 @@ def set_aliases(current_user, attach_object, aliases: List[str]):
def copy_object(current_user, target_package: "Package", source_object_url: str): def copy_object(current_user, target_package: "Package", source_object_url: str):
# Ensures that source is readable # Ensures that source object is readable
source_package = PackageManager.get_package_by_url(current_user, source_object_url) source_package = PackageManager.get_package_by_url(current_user, source_object_url)
if source_package == target_package: if source_package == target_package:
@ -405,7 +432,7 @@ def copy_object(current_user, target_package: "Package", source_object_url: str)
parser = EPDBURLParser(source_object_url) parser = EPDBURLParser(source_object_url)
# if the url won't contain a package or is a plain package # if the url don't contain a package or is a plain package
if not parser.contains_package_url(): if not parser.contains_package_url():
raise ValueError(f"Object {source_object_url} can't be copied!") raise ValueError(f"Object {source_object_url} can't be copied!")
@ -690,12 +717,36 @@ def models(request):
# Keep model_types for potential modal/action use # Keep model_types for potential modal/action use
context["model_types"] = { context["model_types"] = {
"ML Relative Reasoning": "ml-relative-reasoning", "ML Relative Reasoning": {
"Rule Based Relative Reasoning": "rule-based-relative-reasoning", "type": "ml-relative-reasoning",
"EnviFormer": "enviformer", "requires_rule_packages": True,
"requires_data_packages": True,
},
"Rule Based Relative Reasoning": {
"type": "rule-based-relative-reasoning",
"requires_rule_packages": True,
"requires_data_packages": True,
},
"EnviFormer": {
"type": "enviformer",
"requires_rule_packages": False,
"requires_data_packages": True,
},
} }
if s.FLAGS.get("PLUGINS", False):
for k, v in s.CLASSIFIER_PLUGINS.items(): for k, v in s.CLASSIFIER_PLUGINS.items():
context["model_types"][v.display()] = k context["model_types"][v().display()] = {
"type": k,
"requires_rule_packages": True,
"requires_data_packages": True,
}
for k, v in s.PROPERTY_PLUGINS.items():
context["model_types"][v().display()] = {
"type": k,
"requires_rule_packages": v().requires_rule_packages,
"requires_data_packages": v().requires_data_packages,
}
# Context for paginated template # Context for paginated template
context["entity_type"] = "model" context["entity_type"] = "model"
@ -806,16 +857,36 @@ def package_models(request, package_uuid):
) )
context["model_types"] = { context["model_types"] = {
"ML Relative Reasoning": "mlrr", "ML Relative Reasoning": {
"Rule Based Relative Reasoning": "rbrr", "type": "ml-relative-reasoning",
"requires_rule_packages": True,
"requires_data_packages": True,
},
"Rule Based Relative Reasoning": {
"type": "rule-based-relative-reasoning",
"requires_rule_packages": True,
"requires_data_packages": True,
},
"EnviFormer": {
"type": "enviformer",
"requires_rule_packages": False,
"requires_data_packages": True,
},
} }
if s.FLAGS.get("ENVIFORMER", False):
context["model_types"]["EnviFormer"] = "enviformer"
if s.FLAGS.get("PLUGINS", False): if s.FLAGS.get("PLUGINS", False):
for k, v in s.CLASSIFIER_PLUGINS.items(): for k, v in s.CLASSIFIER_PLUGINS.items():
context["model_types"][v.display()] = k context["model_types"][v().display()] = {
"type": k,
"requires_rule_packages": True,
"requires_data_packages": True,
}
for k, v in s.PROPERTY_PLUGINS.items():
context["model_types"][v().display()] = {
"type": k,
"requires_rule_packages": v().requires_rule_packages,
"requires_data_packages": v().requires_data_packages,
}
return render(request, "collections/models_paginated.html", context) return render(request, "collections/models_paginated.html", context)
@ -846,7 +917,7 @@ def package_models(request, package_uuid):
params["threshold"] = threshold params["threshold"] = threshold
mod = EnviFormer.create(**params) mod = EnviFormer.create(**params)
elif model_type == "mlrr": elif model_type == "ml-relative-reasoning":
# ML Specific # ML Specific
threshold = float(request.POST.get("model-threshold", 0.5)) threshold = float(request.POST.get("model-threshold", 0.5))
# TODO handle additional fingerprinter # TODO handle additional fingerprinter
@ -870,14 +941,30 @@ def package_models(request, package_uuid):
params["app_domain_local_compatibility_threshold"] = local_compatibility_threshold params["app_domain_local_compatibility_threshold"] = local_compatibility_threshold
mod = MLRelativeReasoning.create(**params) mod = MLRelativeReasoning.create(**params)
elif model_type == "rbrr": elif model_type == "rule-based-relative-reasoning":
params["rule_packages"] = [ params["rule_packages"] = [
PackageManager.get_package_by_url(current_user, p) for p in rule_packages PackageManager.get_package_by_url(current_user, p) for p in rule_packages
] ]
mod = RuleBasedRelativeReasoning.create(**params) mod = RuleBasedRelativeReasoning.create(**params)
elif s.FLAGS.get("PLUGINS", False) and model_type in s.CLASSIFIER_PLUGINS.values(): elif s.FLAGS.get("PLUGINS", False) and model_type in s.CLASSIFIER_PLUGINS:
pass pass
elif s.FLAGS.get("PLUGINS", False) and model_type in s.PROPERTY_PLUGINS:
params["plugin_identifier"] = model_type
impl = s.PROPERTY_PLUGINS[model_type]
inst = impl()
if inst.requires_rule_packages():
params["rule_packages"] = [
PackageManager.get_package_by_url(current_user, p) for p in rule_packages
]
else:
params["rule_packages"] = []
if not inst.requires_data_packages():
del params["data_packages"]
mod = PropertyPluginModel.create(**params)
else: else:
return error( return error(
request, "Invalid model type.", f'Model type "{model_type}" is not supported."' request, "Invalid model type.", f'Model type "{model_type}" is not supported."'
@ -901,14 +988,18 @@ def package_model(request, package_uuid, model_uuid):
if request.method == "GET": if request.method == "GET":
classify = request.GET.get("classify", False) classify = request.GET.get("classify", False)
ad_assessment = request.GET.get("app-domain-assessment", False) ad_assessment = request.GET.get("app-domain-assessment", False)
# TODO this needs to be generic
half_life = request.GET.get("half_life", False)
if classify or ad_assessment: if any([classify, ad_assessment, half_life]):
smiles = request.GET.get("smiles", "").strip() smiles = request.GET.get("smiles", "").strip()
# Check if smiles is non empty and valid # Check if smiles is non empty and valid
if smiles == "": if smiles == "":
return JsonResponse({"error": "Received empty SMILES"}, status=400) return JsonResponse({"error": "Received empty SMILES"}, status=400)
stereo = FormatConverter.has_stereo(smiles) stereo = FormatConverter.has_stereo(smiles)
try: try:
stand_smiles = FormatConverter.standardize(smiles, remove_stereo=True) stand_smiles = FormatConverter.standardize(smiles, remove_stereo=True)
except ValueError: except ValueError:
@ -942,6 +1033,19 @@ def package_model(request, package_uuid, model_uuid):
return JsonResponse(res, safe=False) return JsonResponse(res, safe=False)
elif half_life:
from epdb.tasks import dispatch_eager, predict_simple
_, run_res = dispatch_eager(
current_user, predict_simple, current_model.pk, stand_smiles, include_svg=True
)
# Here we expect a single result
if isinstance(run_res.result, Iterable):
return JsonResponse(run_res.result[0].model_dump(mode="json"), safe=False)
return JsonResponse(run_res.result.model_dump(mode="json"), safe=False)
else: else:
app_domain_assessment = current_model.app_domain.assess(stand_smiles) app_domain_assessment = current_model.app_domain.assess(stand_smiles)
return JsonResponse(app_domain_assessment, safe=False) return JsonResponse(app_domain_assessment, safe=False)
@ -956,7 +1060,11 @@ def package_model(request, package_uuid, model_uuid):
context["model"] = current_model context["model"] = current_model
context["current_object"] = current_model context["current_object"] = current_model
return render(request, "objects/model.html", context) if isinstance(current_model, PropertyPluginModel):
context["plugin_identifier"] = current_model.plugin_identifier
return render(request, "objects/model/property_model.html", context)
else:
return render(request, "objects/model/classification_model.html", context)
elif request.method == "POST": elif request.method == "POST":
if hidden := request.POST.get("hidden", None): if hidden := request.POST.get("hidden", None):
@ -1567,7 +1675,7 @@ def package_rule(request, package_uuid, rule_uuid):
context = get_base_context(request) context = get_base_context(request)
if smiles := request.GET.get("smiles", False): if smiles := request.GET.get("smiles", False):
stand_smiles = FormatConverter.standardize(smiles) stand_smiles = FormatConverter.standardize(smiles, remove_stereo=True)
res = current_rule.apply(stand_smiles) res = current_rule.apply(stand_smiles)
if len(res) > 1: if len(res) > 1:
logger.info( logger.info(
@ -1895,7 +2003,7 @@ def package_pathways(request, package_uuid):
"Pathway prediction failed due to missing or empty SMILES", "Pathway prediction failed due to missing or empty SMILES",
) )
try: try:
stand_smiles = FormatConverter.standardize(smiles) stand_smiles = FormatConverter.standardize(smiles, remove_stereo=True)
except ValueError: except ValueError:
return error( return error(
request, request,
@ -1916,6 +2024,7 @@ def package_pathways(request, package_uuid):
prediction_setting = SettingManager.get_setting_by_url(current_user, prediction_setting) prediction_setting = SettingManager.get_setting_by_url(current_user, prediction_setting)
else: else:
prediction_setting = current_user.prediction_settings() prediction_setting = current_user.prediction_settings()
pw = Pathway.create( pw = Pathway.create(
current_package, current_package,
stand_smiles, stand_smiles,
@ -2480,8 +2589,10 @@ def package_scenario(request, package_uuid, scenario_uuid):
context["breadcrumbs"] = breadcrumbs(current_package, "scenario", current_scenario) context["breadcrumbs"] = breadcrumbs(current_package, "scenario", current_scenario)
context["scenario"] = current_scenario context["scenario"] = current_scenario
# Get scenarios that have current_scenario as a parent
context["children"] = current_scenario.scenario_set.order_by("name") context["associated_additional_information"] = AdditionalInformation.objects.filter(
scenario=current_scenario
)
# Note: Modals now fetch schemas and data from API endpoints # Note: Modals now fetch schemas and data from API endpoints
# Keeping these for backwards compatibility if needed elsewhere # Keeping these for backwards compatibility if needed elsewhere
@ -2588,11 +2699,22 @@ def user(request, user_uuid):
context["user"] = requested_user context["user"] = requested_user
model_qs = EPModel.objects.none() accessible_packages = PackageManager.get_all_readable_packages(
for p in PackageManager.get_all_readable_packages(requested_user, include_reviewed=True): requested_user, include_reviewed=True
model_qs |= p.models )
context["models"] = model_qs property_models = PropertyPluginModel.objects.filter(
package__in=accessible_packages
).order_by("name")
tp_prediction_models = (
EPModel.objects.filter(package__in=accessible_packages)
.exclude(id__in=[pm.id for pm in property_models])
.order_by("name")
)
context["models"] = tp_prediction_models
context["property_models"] = property_models
context["tokens"] = APIToken.objects.filter(user=requested_user) context["tokens"] = APIToken.objects.filter(user=requested_user)
@ -2769,7 +2891,6 @@ def settings(request):
return render(request, "collections/settings_paginated.html", context) return render(request, "collections/settings_paginated.html", context)
return render(request, "collections/objects_list.html", context)
elif request.method == "POST": elif request.method == "POST":
if s.DEBUG: if s.DEBUG:
for k, v in request.POST.items(): for k, v in request.POST.items():
@ -2781,15 +2902,18 @@ def settings(request):
new_default = request.POST.get("prediction-setting-new-default", "off") == "on" new_default = request.POST.get("prediction-setting-new-default", "off") == "on"
# min 2, max s.DEFAULT_MAX_NUMBER_OF_NODES
max_nodes = min( max_nodes = min(
max( max(
int(request.POST.get("prediction-setting-max-nodes", 1)), int(request.POST.get("prediction-setting-max-nodes", 1)),
s.DEFAULT_MAX_NUMBER_OF_NODES, 2,
), ),
s.DEFAULT_MAX_NUMBER_OF_NODES, s.DEFAULT_MAX_NUMBER_OF_NODES,
) )
# min 1, max s.DEFAULT_MAX_DEPTH
max_depth = min( max_depth = min(
max(int(request.POST.get("prediction-setting-max-depth", 1)), s.DEFAULT_MAX_DEPTH), max(int(request.POST.get("prediction-setting-max-depth", 1)), 1),
s.DEFAULT_MAX_DEPTH, s.DEFAULT_MAX_DEPTH,
) )
@ -2827,6 +2951,18 @@ def settings(request):
else: else:
raise BadRequest("Neither Model-Based nor Rule-Based as Method selected!") raise BadRequest("Neither Model-Based nor Rule-Based as Method selected!")
property_model_urls = request.POST.getlist("prediction-setting-property-models")
if property_model_urls:
mods = []
for pm_url in property_model_urls:
model = PropertyPluginModel.objects.get(url=pm_url)
if PackageManager.readable(current_user, model.package):
mods.append(model)
params["property_models"] = mods
created_setting = SettingManager.create_setting( created_setting = SettingManager.create_setting(
current_user, current_user,
name=name, name=name,
@ -2936,12 +3072,12 @@ def jobs(request):
parts = pair.split(",") parts = pair.split(",")
try: try:
smiles = FormatConverter.standardize(parts[0]) smiles = FormatConverter.standardize(parts[0], remove_stereo=True)
except ValueError: except ValueError:
raise BadRequest(f"Couldn't standardize SMILES {parts[0]}!") raise BadRequest(f"Couldn't standardize SMILES {parts[0]}!")
# name is optional # name is optional
name = parts[1] if len(parts) > 1 else None name = ",".join(parts[1:]) if len(parts) > 1 else None
pred_data.append([smiles, name]) pred_data.append([smiles, name])
max_tps = 50 max_tps = 50

BIN
fixtures/db.dump Normal file

Binary file not shown.

Binary file not shown.

361
pepper/__init__.py Normal file
View File

@ -0,0 +1,361 @@
import logging
import math
import os
import pickle
from datetime import datetime
from typing import Any, List, Optional
import polars as pl
from pydantic import computed_field
from sklearn.metrics import (
mean_absolute_error,
mean_squared_error,
r2_score,
root_mean_squared_error,
)
from sklearn.model_selection import ShuffleSplit
# Once stable these will be exposed by enviPy-plugins lib
from envipy_additional_information import register # noqa: I001
from bridge.contracts import Property, PropertyType # noqa: I001
from bridge.dto import (
BuildResult,
EnviPyDTO,
EvaluationResult,
PredictedProperty,
RunResult,
) # noqa: I001
from .impl.pepper import Pepper # noqa: I001
logger = logging.getLogger(__name__)
@register("pepperprediction")
class PepperPrediction(PredictedProperty):
    """Result of a PEPPER half-life prediction.

    Parameters of a lognormal half-life distribution where
    log10(half-life) ~ Normal(log_mean, log_std^2); `mean`/`std` are the
    base-10 exponentiated counterparts in days. All fields are None when the
    model could not produce a prediction for the compound (see PEPPER.run()).
    """

    mean: float | None
    std: float | None
    log_mean: float | None
    log_std: float | None

    @computed_field
    @property
    def svg(self, xscale="linear", quantiles=(0.01, 0.99), n_points=2000) -> Optional[str]:
        """Render the lognormal half-life distribution as an SVG string.

        Shades, using the REACH thresholds in days
        (https://doi.org/10.26434/chemrxiv-2025-xmslf):
          - x < 120 in green (Non-persistent)
          - x >= 120 in yellow (Persistent; mass deliberately includes the
            very-persistent tail)
          - x > 180 in red (Very persistent)
        The legend shows the probability mass of each region.

        Returns None when no prediction is available (log_mean/log_std None).

        NOTE: since this is a property, the extra arguments can never be
        supplied by callers; they merely document the rendering defaults.
        """
        # Lazy imports: plotting dependencies are only needed when the SVG is
        # actually rendered/serialized.
        import io

        import matplotlib.patches as mpatches
        import numpy as np
        from matplotlib import pyplot as plt
        from scipy import stats

        # PEPPER.run() emits all-None predictions for compounds the model
        # rejected; serialize those as "no plot" instead of raising TypeError.
        if self.log_mean is None or self.log_std is None:
            return None

        sigma_log10 = self.log_std
        mu_log10 = self.log_mean
        if sigma_log10 <= 0:
            raise ValueError("sigma_log10 must be > 0")

        # Persistent and Very Persistent thresholds in days from REACH
        # (https://doi.org/10.26434/chemrxiv-2025-xmslf)
        p = 120
        vp = 180

        # Convert base-10 log parameters to natural-log parameters for SciPy's lognorm
        ln10 = np.log(10.0)
        mu_ln = mu_log10 * ln10
        sigma_ln = sigma_log10 * ln10
        # SciPy parameterization: lognorm(s=sigma_ln, scale=exp(mu_ln))
        dist = stats.lognorm(s=sigma_ln, scale=np.exp(mu_ln))

        # Exact probabilities per region; "persistent" includes the
        # very-persistent tail on purpose.
        p_green = dist.cdf(p)  # P(X < p) prob not persistent
        p_yellow = 1.0 - dist.cdf(p)  # P(X > p) prob persistent
        p_red = 1.0 - dist.cdf(vp)  # P(X > vp) prob very persistent

        # Plotting range from the requested quantiles, widened so both
        # thresholds stay visible.
        q_low, q_high = dist.ppf(quantiles)
        x_min = max(1e-12, min(q_low, p) * 0.9)
        x_max = max(q_high, vp) * 1.1

        # Build x-grid (linear days axis by default)
        if xscale == "log":
            x = np.logspace(np.log10(x_min), np.log10(x_max), n_points)
        else:
            x = np.linspace(x_min, x_max, n_points)
        y = dist.pdf(x)

        # Masks for shading the three persistence regions
        mask_green = x < p
        mask_yellow = (x >= p) & (x <= vp)
        mask_red = x > vp

        fig, ax = plt.subplots(figsize=(9, 5.5))
        ax.plot(x, y, color="#1f4e79", lw=2, label="Lognormal PDF")
        if np.any(mask_green):
            ax.fill_between(x[mask_green], y[mask_green], 0, color="tab:green", alpha=0.3)
        if np.any(mask_yellow):
            ax.fill_between(x[mask_yellow], y[mask_yellow], 0, color="gold", alpha=0.35)
        if np.any(mask_red):
            ax.fill_between(x[mask_red], y[mask_red], 0, color="tab:red", alpha=0.3)

        # Threshold lines
        ax.axvline(p, color="gray", ls="--", lw=1)
        ax.axvline(vp, color="gray", ls="--", lw=1)

        # Labels & title
        ax.set_title(
            f"Half-life Distribution (Lognormal)\nlog10 parameters: μ={mu_log10:g}, σ={sigma_log10:g}"
        )
        ax.set_xlabel("Half-life (days)")
        ax.set_ylabel("Probability density")
        ax.grid(True, alpha=0.25)
        if xscale == "log":
            ax.set_xscale("log")

        # Legend with the probability mass of each shaded region
        patches = [
            mpatches.Patch(
                color="tab:green",
                alpha=0.3,
                label=f"Non-persistent (<{p:g} d): {p_green:.2%}",
            ),
            mpatches.Patch(
                color="gold",
                alpha=0.35,
                label=f"Persistent ({p:g}–{vp:g} d): {p_yellow:.2%}",
            ),
            mpatches.Patch(
                color="tab:red",
                alpha=0.3,
                label=f"Very persistent (>{vp:g} d): {p_red:.2%}",
            ),
        ]
        ax.legend(handles=patches, frameon=True)
        plt.tight_layout()

        # --- Export to SVG string and release the figure ---
        buf = io.StringIO()
        fig.savefig(buf, format="svg", bbox_inches="tight")
        svg = buf.getvalue()
        plt.close(fig)
        buf.close()
        return svg
class PEPPER(Property):
    """PEPPER (Predict Environmental Pollutant PERsistence) property plugin.

    Builds, runs and evaluates a Pepper Gaussian-process model on log10
    half-lives (DT50) collected from the readable data packages.
    """

    def identifier(self) -> str:
        return "pepper"

    def display(self) -> str:
        return "PEPPER"

    def name(self) -> str:
        return "Predict Environmental Pollutant PERsistence"

    def requires_rule_packages(self) -> bool:
        return False

    def requires_data_packages(self) -> bool:
        # Training requires measured half-lives from data packages.
        return True

    def get_type(self) -> PropertyType:
        return PropertyType.HEAVY

    def generate_dataset(self, eP: EnviPyDTO) -> pl.DataFrame:
        """
        Build a Polars DataFrame with one row per (compound, half-life).

        Iterates all compounds, standardizes their SMILES (stereo removed) and
        records log10 of the midpoint of each DT50 interval. Rows are
        deduplicated on (smiles, dt50_log).

        Parameters:
            eP (EnviPyDTO): access to compound data, standardization and
                half-life retrieval.

        Returns:
            pl.DataFrame with columns [structure_id, smiles, dt50_log].
        """
        columns = ["structure_id", "smiles", "dt50_log"]
        rows = []
        for c in eP.get_compounds():
            hls = c.half_lifes()
            if hls:
                stand_smiles = eP.standardize(c.smiles, remove_stereo=True)
                # Scenario keys are not needed here; every half-life becomes a row.
                for half_lives in hls.values():
                    for h in half_lives:
                        # The original Pepper code takes the mean of the start
                        # and end of the DT50 interval.
                        # NOTE(review): assumes the midpoint is > 0, otherwise
                        # log10 raises — confirm upstream data guarantees this.
                        half_mean = (h.dt50.start + h.dt50.end) / 2
                        rows.append([str(c.url), stand_smiles, math.log10(half_mean)])
        df = pl.DataFrame(data=rows, schema=columns, orient="row", infer_schema_length=None)
        # Deduplicate identical (smiles, half-life) pairs.
        df = df.unique(subset=["smiles", "dt50_log"], keep="any", maintain_order=False)
        return df

    def save_dataset(self, df: pl.DataFrame, path: str):
        """Pickle the dataset to `path`."""
        with open(path, "wb") as fh:
            pickle.dump(df, fh)

    def load_dataset(self, path: str) -> pl.DataFrame:
        """Unpickle a dataset written by save_dataset(). Trusted files only."""
        with open(path, "rb") as fh:
            return pickle.load(fh)

    def build(self, eP: EnviPyDTO, *args, **kwargs) -> BuildResult | None:
        """Train a Pepper model and persist model + preprocessed dataset in the
        context's work_dir (the dataset is reused by build_and_evaluate())."""
        logger.info(f"Start building PEPPER {eP.get_context().uuid}")
        df = self.generate_dataset(eP)
        if df.shape[0] == 0:
            raise ValueError("No data found for building model")
        p = Pepper()
        p, train_ds = p.train_model(df)
        ds_store_path = os.path.join(
            eP.get_context().work_dir, f"pepper_ds_{eP.get_context().uuid}.pkl"
        )
        self.save_dataset(train_ds, ds_store_path)
        model_store_path = os.path.join(
            eP.get_context().work_dir, f"pepper_{eP.get_context().uuid}.pkl"
        )
        p.save_model(model_store_path)
        logger.info(f"Finished building PEPPER {eP.get_context().uuid}")

    def run(self, eP: EnviPyDTO, *args, **kwargs) -> RunResult:
        """Predict half-life distributions for every compound in the context."""
        load_path = os.path.join(eP.get_context().work_dir, f"pepper_{eP.get_context().uuid}.pkl")
        model = Pepper.load_model(load_path)
        X_new = [c.smiles for c in eP.get_compounds()]
        # predict_batch returns [means, stds]; zip pairs them per compound.
        predictions = model.predict_batch(X_new)
        results = []
        for log_mean, log_std in zip(*predictions):
            if log_mean is None or log_std is None:
                # Prediction failed for this compound (bad SMILES/descriptors).
                result = {"log_mean": None, "mean": None, "log_std": None, "std": None, "svg": None}
            else:
                result = {
                    "log_mean": log_mean,
                    "mean": 10 ** log_mean,
                    "log_std": log_std,
                    "std": 10 ** log_std,
                }
            results.append(PepperPrediction(**result))
        rr = RunResult(
            producer=eP.get_context().url,
            description=f"Generated at {datetime.now()}",
            result=results,
        )
        return rr

    def evaluate(self, eP: EnviPyDTO, *args, **kwargs) -> EvaluationResult | None:
        """Score a previously built model against the current dataset."""
        logger.info(f"Start evaluating PEPPER {eP.get_context().uuid}")
        load_path = os.path.join(eP.get_context().work_dir, f"pepper_{eP.get_context().uuid}.pkl")
        p = Pepper.load_model(load_path)
        df = self.generate_dataset(eP)
        ds = p.preprocess_data(df)
        y_pred = p.predict_batch(ds["smiles"])
        # predict_batch returns [means, stds] (a list, not a tuple) — only the
        # mean is scored.
        if isinstance(y_pred, (tuple, list)):
            y_pred = y_pred[0]
        # Drop positions where the prediction failed (None), mirroring
        # build_and_evaluate().
        y_true_filtered, y_pred_filtered = [], []
        for actual, predicted in zip(ds["dt50_bayesian_mean"], y_pred):
            if predicted is None:
                continue
            y_true_filtered.append(actual)
            y_pred_filtered.append(predicted)
        res = self.eval_stats(y_true_filtered, y_pred_filtered)
        logger.info(f"Finished evaluating PEPPER {eP.get_context().uuid}")
        return EvaluationResult(data=res)

    def build_and_evaluate(self, eP: EnviPyDTO, *args, **kwargs) -> EvaluationResult | None:
        """Cross-validate via ShuffleSplit over the persisted training set.

        Trains a fresh Pepper model per fold and collects eval_stats() per fold.
        `n_splits` may be overridden via kwargs (default 20).
        """
        logger.info(f"Start evaluating PEPPER {eP.get_context().uuid}")
        ds_load_path = os.path.join(
            eP.get_context().work_dir, f"pepper_ds_{eP.get_context().uuid}.pkl"
        )
        ds = self.load_dataset(ds_load_path)
        n_splits = kwargs.get("n_splits", 20)
        shuff = ShuffleSplit(n_splits=n_splits, test_size=0.1, random_state=42)
        fold_metrics: List[dict[str, Any]] = []
        for split_id, (train_index, test_index) in enumerate(shuff.split(ds)):
            logger.info(f"Evaluation fold {split_id}/{n_splits} PEPPER {eP.get_context().uuid}")
            train = ds[train_index]
            test = ds[test_index]
            model = Pepper()
            # Dataset was preprocessed at build time; skip redoing it per fold.
            model.train_model(train, preprocess=False)
            features = test[model.descriptors.get_descriptor_names()].rows()
            y_pred = model.predict_batch(features, is_smiles=False)
            # We only need the mean for eval statistics but mean, std can be returned
            if isinstance(y_pred, (tuple, list)):
                y_pred = y_pred[0]
            # Remove None predictions if they occur
            y_true_filtered, y_pred_filtered = [], []
            for actual, predicted in zip(test["dt50_bayesian_mean"], y_pred):
                if predicted is None:
                    continue
                y_true_filtered.append(actual)
                y_pred_filtered.append(predicted)
            if not y_true_filtered:
                logger.warning(f"Skipping empty evaluation fold {split_id}")
                continue
            fold_metrics.append(self.eval_stats(y_true_filtered, y_pred_filtered))
        logger.info(f"Finished evaluating PEPPER {eP.get_context().uuid}")
        return EvaluationResult(data=fold_metrics)

    @staticmethod
    def eval_stats(y_true, y_pred) -> dict[str, float]:
        """Standard regression metrics (r2, mse, rmse, mae)."""
        scores_dic = {
            "r2": r2_score(y_true, y_pred),
            "mse": mean_squared_error(y_true, y_pred),
            "rmse": root_mean_squared_error(y_true, y_pred),
            "mae": mean_absolute_error(y_true, y_pred),
        }
        return scores_dic

0
pepper/impl/__init__.py Normal file
View File

196
pepper/impl/bayesian.py Normal file
View File

@ -0,0 +1,196 @@
import emcee
import numpy as np
from scipy.stats import lognorm, norm
class Bayesian:
    """Bayesian estimation of a compound's half-life distribution.

    Observed log10 half-lives `y` are modeled as Normal(theta, sigma) with
    censoring outside the limits of quantification (LOQ); the posterior over
    (theta, sigma) is sampled with emcee and summarized by its medians.
    """

    def __init__(self, y, comment_list=None):
        """
        :param y: sequence of log10 half-life observations
        :param comment_list: optional per-observation censoring markers
            ("<" below LOQ, ">" above LOQ), aligned with y
        """
        if comment_list is None:
            comment_list = []
        self.y = y
        self.comment_list = comment_list
        # LOQ default settings (log10 days)
        self.LOQ_lower = -1  # (2.4 hours)
        self.LOQ_upper = 3  # 1000 days
        # prior default settings
        self.prior_mu_mean = 1.5
        self.prior_mu_std = 2
        self.prior_sigma_mean = 0.4
        self.prior_sigma_std = 0.4
        self.lower_limit_sigma = 0.2
        # EMCEE defaults
        self.nwalkers = 10
        self.iterations = 2000
        self.burn_in = 100
        ndim = 2  # number of dimensions (mean, std)
        self.sampler = emcee.EnsembleSampler(self.nwalkers, ndim, self.logPosterior)
        # Cached posterior medians; filled lazily by get_posterior_distribution().
        self.posterior_mu = None
        self.posterior_sigma = None

    def get_censored_values_only(self):
        """Values flagged as censored by comment markers or lying outside the LOQ.

        NOTE(review): only indices covered by comment_list are inspected; values
        without a comment entry are never collected here.
        """
        censored_values = []
        for i, comment in enumerate(self.comment_list):
            if comment in ["<", ">"]:
                censored_values.append(self.y[i])
            elif self.y[i] > self.LOQ_upper or self.y[i] < self.LOQ_lower:
                censored_values.append(self.y[i])
        return censored_values

    # Class functions
    def determine_LOQ(self):
        """
        Determines if the LOQ is upper or lower, and the value (if not default)

        :return: upper_LOQ, lower_LOQ (np.nan when no censoring applies on
            that side)
        """
        censored_values = self.get_censored_values_only()
        # Find upper LOQ
        upper_LOQ = np.nan
        # bigger than global LOQ
        if max(self.y) >= self.LOQ_upper:
            upper_LOQ = self.LOQ_upper
        # case if exactly 365 days
        elif max(self.y) == 2.562:  # 365 days
            upper_LOQ = 2.562
            self.LOQ_upper = upper_LOQ
        # case if "bigger than" indication in comments
        # NOTE(review): this branch updates self.LOQ_upper but leaves the
        # returned upper_LOQ as nan — confirm this is intended.
        elif ">" in self.comment_list:
            i = 0
            while i < len(self.y):
                if self.y[i] == min(censored_values) and self.comment_list[i] == ">":
                    self.LOQ_upper = self.y[i]
                    break
                i += 1
        # Find lower LOQ
        lower_LOQ = np.nan
        # smaller than global LOQ
        if min(self.y) <= self.LOQ_lower:
            lower_LOQ = self.LOQ_lower
        # case if exactly 1 day
        elif min(self.y) == 0:  # 1 day
            lower_LOQ = 0
            self.LOQ_lower = 0
        # case if "smaller than" indication in comments
        elif "<" in self.comment_list:
            i = 0
            while i < len(self.y):
                if self.y[i] == max(censored_values) and self.comment_list[i] == "<":
                    self.LOQ_lower = self.y[i]
                    break
                i += 1
        return upper_LOQ, lower_LOQ

    def logLikelihood(self, theta, sigma):
        """
        Likelihood of the data given (theta, sigma), with censoring.

        Uncensored values contribute the normal log-pdf; values at or above the
        upper LOQ contribute the log survival function; values at or below the
        lower LOQ contribute the log CDF.

        :param theta: mean half-life value to be evaluated
        :param sigma: std half-life value to be evaluated
        :return: log_likelihood
        """
        upper_LOQ, lower_LOQ = self.determine_LOQ()
        n_censored_upper = 0
        n_censored_lower = 0
        y_not_cen = []
        if np.isnan(upper_LOQ) and np.isnan(lower_LOQ):
            # No censoring on either side: all observations are uncensored.
            y_not_cen = self.y
        else:
            for i in self.y:
                # A LOQ of nan means "no censoring on that side"; a censored
                # value must NOT also be counted as uncensored, hence elif.
                if not np.isnan(upper_LOQ) and i >= upper_LOQ:  # censor above threshold
                    n_censored_upper += 1
                elif not np.isnan(lower_LOQ) and i <= lower_LOQ:  # censor below threshold
                    n_censored_lower += 1
                else:  # do not censor
                    y_not_cen.append(i)
        LL_left_cen = 0
        LL_right_cen = 0
        LL_not_cen = 0
        if n_censored_lower > 0:  # loglikelihood for left censored observations
            LL_left_cen = n_censored_lower * norm.logcdf(
                lower_LOQ, loc=theta, scale=sigma
            )  # cumulative distribution function CDF
        if n_censored_upper > 0:  # loglikelihood for right censored observations
            LL_right_cen = n_censored_upper * norm.logsf(
                upper_LOQ, loc=theta, scale=sigma
            )  # survival function (1-CDF)
        if len(y_not_cen) > 0:  # loglikelihood for uncensored values
            LL_not_cen = sum(
                norm.logpdf(y_not_cen, loc=theta, scale=sigma)
            )  # probability density function PDF
        return LL_left_cen + LL_not_cen + LL_right_cen

    def get_prior_probability_sigma(self, sigma):
        """Log prior density of sigma (shifted lognormal)."""
        # convert mean and sd to logspace parameters, to see this formula check
        # https://en.wikipedia.org/wiki/Log-normal_distribution under Method of moments section
        temp = 1 + (self.prior_sigma_std / self.prior_sigma_mean) ** 2
        meanlog = self.prior_sigma_mean / np.sqrt(temp)
        sdlog = np.sqrt(np.log(temp))
        # calculate of logpdf of sigma
        norm_pdf_sigma = lognorm.logpdf(sigma, s=sdlog, loc=self.lower_limit_sigma, scale=meanlog)
        return norm_pdf_sigma

    def get_prior_probability_theta(self, theta):
        """Log prior density of theta (normal)."""
        norm_pdf_theta = norm.logpdf(theta, loc=self.prior_mu_mean, scale=self.prior_mu_std)
        return norm_pdf_theta

    def logPrior(self, par):
        """
        Obtain prior loglikelihood of [theta, sigma]

        :param par: par = [theta, sigma]
        :return: loglikelihood
        """
        norm_pdf_mean = self.get_prior_probability_theta(par[0])
        norm_pdf_std = self.get_prior_probability_sigma(par[1])
        log_norm_pdf = [norm_pdf_mean, norm_pdf_std]
        return sum(log_norm_pdf)

    def logPosterior(self, par):
        """
        Obtain posterior loglikelihood

        :param par: [theta, sigma]
        :return: posterior loglikelihood
        """
        logpri = self.logPrior(par)
        if not np.isfinite(logpri):
            return -np.inf
        loglikelihood = self.logLikelihood(par[0], par[1])
        return logpri + loglikelihood

    def get_posterior_distribution(self):
        """
        Sample posterior distribution and get median of mean and std samples.
        The result is cached on the instance; subsequent calls are free.

        :return: posterior half-life mean and std
        """
        # Identity check, not truthiness: a cached value of 0.0 is valid.
        if self.posterior_mu is not None:
            return self.posterior_mu, self.posterior_sigma
        # Sampler parameters
        ndim = 2  # number of dimensions (mean,std)
        # Only positive starting numbers (for std). NOTE: not seeded — results
        # vary between runs unless np.random is seeded externally.
        p0 = abs(np.random.randn(self.nwalkers, ndim))
        # Sample distribution
        self.sampler.run_mcmc(p0, self.iterations)
        # get chain and log_prob in one-dimensional array (merged chains, burn-in dropped)
        samples = self.sampler.get_chain(flat=True, discard=self.burn_in)
        # get median mean and std
        self.posterior_mu = np.median(samples[:, 0])
        self.posterior_sigma = np.median(samples[:, 1])
        return self.posterior_mu, self.posterior_sigma
# Utility functions
def get_normal_distribution(x, mu, sig):
    """Unnormalized Gaussian bump exp(-(x - mu)^2 / (2 * sig^2))."""
    z = (x - mu) / sig
    return np.exp(-0.5 * z * z)

View File

@ -0,0 +1,11 @@
GPR:
name: Gaussian Process Regressor
regressor: GaussianProcessRegressor
regressor_params:
normalize_y: True
n_restarts_optimizer: 0
kernel: "ConstantKernel(1.0, (1e-3, 1e3)) * Matern(length_scale=2.5, length_scale_bounds=(1e-3, 1e3), nu=0.5)"
feature_reduction_method: None
feature_reduction_parameters:
pca:
n_components: 34

View File

@ -0,0 +1,60 @@
from abc import ABC, abstractmethod
from typing import List
from mordred import Calculator, descriptors
from padelpy import from_smiles
from rdkit import Chem
class Descriptor(ABC):
    """Interface for molecular descriptor calculators used by Pepper."""

    @abstractmethod
    def get_molecule_descriptors(self, molecule: str) -> List[float | int] | None:
        """Return the descriptor vector for a SMILES string, or None/empty on failure."""
        pass

    @abstractmethod
    def get_descriptor_names(self) -> List[str]:
        """Return stable positional names matching the descriptor vector."""
        pass
class Mordred(Descriptor):
    """Mordred 2D descriptor calculator."""

    # Shared calculator; ignore_3D avoids descriptors that need conformers.
    calc = Calculator(descriptors, ignore_3D=True)

    def get_molecule_descriptors(self, molecule: str) -> List[float | int] | None:
        """Compute Mordred descriptors for a SMILES string.

        Returns None when the SMILES cannot be parsed, so callers (see
        Pepper.preprocess_data) can filter the compound out instead of crashing
        on calc(None).
        """
        mol = Chem.MolFromSmiles(molecule)
        if mol is None:
            return None
        res = list(self.calc(mol))
        return res

    def get_descriptor_names(self) -> List[str]:
        """Positional names matching the calculator's descriptor order."""
        return [f"Mordred_{i}" for i in range(len(self.calc.descriptors))]
class PaDEL(Descriptor):
    """PaDEL descriptor calculator backed by padelpy's from_smiles.

    (The leftover Mordred `Calculator` class attribute was removed: it was
    never used by PaDEL and was built — including 3D descriptors — at import
    time.)
    """

    def get_molecule_descriptors(self, molecule: str) -> List[float | int] | None:
        """Compute PaDEL descriptors; returns [] when padelpy fails on the SMILES."""
        try:
            padel_descriptors = from_smiles(molecule, threads=1)
        except RuntimeError:
            # padelpy signals invalid input / timeouts via RuntimeError; an
            # empty vector lets callers filter the compound out.
            return []
        formatted = []
        for v in padel_descriptors.values():
            try:
                formatted.append(float(v))
            except ValueError:
                # Non-numeric descriptor values are zero-filled.
                formatted.append(0.0)
        return formatted

    def get_descriptor_names(self) -> List[str]:
        """Positional names for the PaDEL vector (padelpy emits 1875 descriptors)."""
        return [f"PaDEL_{i}" for i in range(1875)]
if __name__ == "__main__":
    # Smoke test: print descriptor vectors for a sample nitrophenol SMILES.
    sample = "CC1=CC(O)=CC=C1[N+](=O)[O-]"
    for calculator in (Mordred(), PaDEL()):
        print(list(calculator.get_molecule_descriptors(sample)))

329
pepper/impl/pepper.py Normal file
View File

@ -0,0 +1,329 @@
import importlib.resources
import logging
import math
import os
import pickle
from collections import defaultdict
from typing import List
import numpy as np
import polars as pl
import yaml
from joblib import Parallel, delayed
from scipy.cluster import hierarchy
from scipy.spatial.distance import squareform
from scipy.stats import spearmanr
from sklearn.feature_selection import VarianceThreshold
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import FunctionTransformer, MinMaxScaler
from .bayesian import Bayesian
from .descriptors import Mordred
class Pepper:
def __init__(self, config_path=None, random_state=42):
self.random_state = random_state
if config_path is None:
config_path = importlib.resources.files("pepper.impl.config").joinpath(
"regressor_settings_singlevalue_soil_paper_GPR_optimized.yml"
)
with open(config_path, "r") as file:
regressor_settings = yaml.safe_load(file)
if len(regressor_settings) > 1:
logging.warning(
f"More than one regressor config found in {config_path}, using the first one"
)
self.regressor_settings = regressor_settings[list(regressor_settings.keys())[0]]
if "kernel" in self.regressor_settings["regressor_params"]:
from sklearn.gaussian_process.kernels import ConstantKernel, Matern # noqa: F401
# We could hard-code the kernels they have, maybe better than using eval
self.regressor_settings["regressor_params"]["kernel"] = eval(
self.regressor_settings["regressor_params"]["kernel"]
)
# We assume the YAML has the key regressor containing a regressor name
self.regressor = self.get_regressor_by_name(self.regressor_settings["regressor"])
if "regressor_params" in self.regressor_settings: # Set params if any are given
self.regressor.set_params(**self.regressor_settings["regressor_params"])
# TODO we could make this configurable
self.descriptors = Mordred()
self.descriptor_subset = None
self.min_max_scaler = MinMaxScaler().set_output(transform="polars")
self.feature_preselector = Pipeline(
[
(
"variance_threshold",
VarianceThreshold(threshold=0.02).set_output(transform="polars"),
),
# Feature selection based on variance threshold
(
"custom_feature_selection",
FunctionTransformer(
func=self.remove_highly_correlated_features,
validate=False,
kw_args={"corr_method": "spearman", "cluster_threshold": 0.01},
).set_output(transform="polars"),
),
]
)
def get_regressor_by_name(self, regressor_string):
"""
Load regressor function from a regressor name
:param regressor_string: name of regressor as defined in config file (function name with parentheses)
:return: Regressor object
"""
# if regressor_string == 'RandomForestRegressor':
# return RandomForestRegressor(random_state=self.random_state)
# elif regressor_string == 'GradientBoostingRegressor':
# return GradientBoostingRegressor(random_state=self.random_state)
# elif regressor_string == 'AdaBoostRegressor':
# return AdaBoostRegressor(random_state=self.random_state)
# elif regressor_string == 'MLPRegressor':
# return MLPRegressor(random_state=self.random_state)
# elif regressor_string == 'SVR':
# return SVR()
# elif regressor_string == 'KNeighborsRegressor':
# return KNeighborsRegressor()
if regressor_string == "GaussianProcessRegressor":
return GaussianProcessRegressor(random_state=self.random_state)
# elif regressor_string == 'DecisionTreeRegressor':
# return DecisionTreeRegressor(random_state=self.random_state)
# elif regressor_string == 'Ridge':
# return Ridge(random_state=self.random_state)
# elif regressor_string == 'SGDRegressor':
# return SGDRegressor(random_state=self.random_state)
# elif regressor_string == 'KernelRidge':
# return KernelRidge()
# elif regressor_string == 'LinearRegression':
# return LinearRegression()
# elif regressor_string == 'LSVR':
# return SVR(kernel='linear') # Linear Support Vector Regressor
else:
raise NotImplementedError(
f"No regressor type defined for regressor_string = {regressor_string}"
)
def train_model(self, train_data, preprocess=True):
"""
Fit self.regressor and preprocessors. train_data is a pl.DataFrame
"""
if preprocess:
# Compute the mean and std of half-lives per structure
train_data = self.preprocess_data(train_data)
# train_data structure:
# columns = [
# "structure_id",
# "smiles",
# "dt50_log",
# "dt50_bayesian_mean",
# "dt50_bayesian_std",
# ] + self.descriptors.get_descriptor_names()
# only select descriptor features for feature preselector
df = train_data[self.descriptors.get_descriptor_names()]
# Remove columns having at least None, nan, inf, "" value
df = Pepper.keep_clean_columns(df)
# Scale and Remove highly correlated features as well as features having a low variance
x_train_normal = self.min_max_scaler.fit_transform(df)
x_train_normal = self.feature_preselector.fit_transform(x_train_normal)
# Store subset, as this is the input used for prediction
self.descriptor_subset = x_train_normal.columns
y_train = train_data["dt50_bayesian_mean"].to_numpy()
y_train_std = train_data["dt50_bayesian_std"].to_numpy()
self.regressor.set_params(alpha=y_train_std)
self.regressor.fit(x_train_normal, y_train)
return self, train_data
@staticmethod
def keep_clean_columns(df: pl.DataFrame) -> pl.DataFrame:
"""
Filters out columns from the DataFrame that contain null values, NaN, or infinite values.
This static method takes a DataFrame as input and evaluates each of its columns to determine
if the column contains invalid values. Columns that have null values, NaN, or infinite values
are excluded from the resulting DataFrame. The method is especially useful for cleaning up a
dataset by keeping only the valid columns.
Parameters:
df (polars.DataFrame): The input DataFrame to be cleaned.
Returns:
polars.DataFrame: A DataFrame containing only columns without null, NaN, or infinite values.
"""
valid_cols = []
for col in df.columns:
s = df[col]
# Check nulls
has_null = s.null_count() > 0
# Check NaN and inf only for numeric columns
if s.dtype.is_numeric():
has_nan = s.is_nan().any()
has_inf = s.is_infinite().any()
else:
has_nan = False
has_inf = False
if not (has_null or has_nan or has_inf):
valid_cols.append(col)
return df.select(valid_cols)
def preprocess_data(self, dataset):
groups = [group for group in dataset.group_by("structure_id")]
# Unless explicitly set compute everything serial
if os.environ.get("N_PEPPER_THREADS", 1) > 1:
results = Parallel(n_jobs=os.environ["N_PEPPER_THREADS"])(
delayed(compute_bayes_per_group)(group[1])
for group in dataset.group_by("structure_id")
)
else:
results = []
for g in groups:
results.append(compute_bayes_per_group(g[1]))
bayes_stats = pl.concat(results, how="vertical")
dataset = dataset.join(bayes_stats, on="structure_id", how="left")
# Remove duplicates after calculating mean, std
dataset = dataset.unique(subset="structure_id")
# Calculate and normalise features, make a "desc" column with the features
dataset = dataset.with_columns(
pl.col("smiles")
.map_elements(
self.descriptors.get_molecule_descriptors, return_dtype=pl.List(pl.Float64)
)
.alias("desc")
)
# If a SMILES fails to get desc it is removed
dataset = dataset.filter(pl.col("desc").is_not_null() & (pl.col("desc").list.len() > 0))
# Flatten the features into the dataset
dataset = dataset.with_columns(
pl.col("desc").list.to_struct(fields=self.descriptors.get_descriptor_names())
).unnest("desc")
return dataset
def predict_batch(self, batch: List[str], is_smiles: bool = True) -> List[List[float | None]]:
if is_smiles:
rows = [self.descriptors.get_molecule_descriptors(smiles) for smiles in batch]
else:
rows = batch
# Create Dataframe with all descriptors
initial_desc_rows_df = pl.DataFrame(
data=rows, schema=self.descriptors.get_descriptor_names(), orient="row"
)
# Before checking for invalid values per row, select only required columns
initial_desc_rows_df = initial_desc_rows_df.select(
list(self.min_max_scaler.feature_names_in_)
)
to_pad = []
adjusted_rows = []
for i, row in enumerate(initial_desc_rows_df.rows()):
# neither infs nor nans are found -> rows seems to be valid input
if row and not any(math.isinf(x) for x in row) and not any(math.isnan(x) for x in row):
adjusted_rows.append(row)
else:
to_pad.append(i)
if adjusted_rows:
desc_rows_df = pl.DataFrame(
data=adjusted_rows, schema=list(self.min_max_scaler.feature_names_in_), orient="row"
)
x_normal = self.min_max_scaler.transform(desc_rows_df)
x_normal = x_normal[self.descriptor_subset]
res = self.regressor.predict(x_normal, return_std=True)
# Convert to lists
res = [list(res[0]), list(res[1])]
# If we had rows containing bad input (inf, nan) insert Nones at the correct position
if to_pad:
for i in to_pad:
res[0].insert(i, None)
res[1].insert(i, None)
return res
else:
return [[None] * len(batch), [None] * len(batch)]
@staticmethod
def remove_highly_correlated_features(
X_train,
corr_method: str = "spearman",
cluster_threshold: float = 0.01,
ignore=False,
):
if ignore:
return X_train
# pass
else:
# Using spearmanr from scipy to achieve pandas.corr in polars
corr = spearmanr(X_train, axis=0).statistic
# Ensure the correlation matrix is symmetric
corr = (corr + corr.T) / 2
np.fill_diagonal(corr, 1)
corr = np.nan_to_num(corr)
# code from https://scikit-learn.org/stable/auto_examples/inspection/
# plot_permutation_importance_multicollinear.html
# We convert the correlation matrix to a distance matrix before performing
# hierarchical clustering using Ward's linkage.
distance_matrix = 1 - np.abs(corr)
dist_linkage = hierarchy.ward(squareform(distance_matrix))
cluster_ids = hierarchy.fcluster(dist_linkage, cluster_threshold, criterion="distance")
cluster_id_to_feature_ids = defaultdict(list)
for idx, cluster_id in enumerate(cluster_ids):
cluster_id_to_feature_ids[cluster_id].append(idx)
my_selected_features = [v[0] for v in cluster_id_to_feature_ids.values()]
X_train_sel = X_train[:, my_selected_features]
return X_train_sel
def save_model(self, path):
with open(path, "wb") as save_file:
pickle.dump(self, save_file, protocol=5)
@staticmethod
def load_model(path) -> "Pepper":
with open(path, "rb") as load_file:
return pickle.load(load_file)
def compute_bayes_per_group(group):
    """Posterior mean/std of log10(DT50) for one structure's half-life group."""
    posterior_mean, posterior_std = Bayesian(group["dt50_log"]).get_posterior_distribution()
    return pl.DataFrame(
        {
            "structure_id": [group["structure_id"][0]],
            "dt50_bayesian_mean": [posterior_mean],
            "dt50_bayesian_std": [posterior_std],
        }
    )

View File

@ -36,7 +36,7 @@ dependencies = [
[tool.uv.sources] [tool.uv.sources]
enviformer = { git = "ssh://git@git.envipath.com/enviPath/enviformer.git", rev = "v0.1.4" } enviformer = { git = "ssh://git@git.envipath.com/enviPath/enviformer.git", rev = "v0.1.4" }
envipy-plugins = { git = "ssh://git@git.envipath.com/enviPath/enviPy-plugins.git", rev = "v0.1.0" } envipy-plugins = { git = "ssh://git@git.envipath.com/enviPath/enviPy-plugins.git", rev = "v0.1.0" }
envipy-additional-information = { git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git", rev = "v0.4.2" } envipy-additional-information = { git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git", branch = "develop" }
envipy-ambit = { git = "ssh://git@git.envipath.com/enviPath/enviPy-ambit.git" } envipy-ambit = { git = "ssh://git@git.envipath.com/enviPath/enviPy-ambit.git" }
[project.optional-dependencies] [project.optional-dependencies]
@ -51,7 +51,13 @@ dev = [
"pytest-django>=4.11.1", "pytest-django>=4.11.1",
"pytest-cov>=7.0.0", "pytest-cov>=7.0.0",
] ]
pepper-plugin = [
"matplotlib>=3.10.8",
"pyyaml>=6.0.3",
"emcee>=3.1.6",
"mordredcommunity==2.0.7",
"padelpy" # Remove once we're certain we'll go with mordred
]
[tool.ruff] [tool.ruff]
line-length = 100 line-length = 100

View File

@ -161,8 +161,18 @@ document.addEventListener("alpine:init", () => {
set value(v) { set value(v) {
this.data[this.fieldName] = v; this.data[this.fieldName] = v;
}, },
get multiple() {
return !!(this.fieldSchema.items && this.fieldSchema.items.enum);
},
get options() { get options() {
return this.fieldSchema.enum || []; if (this.fieldSchema.enum) {
return this.fieldSchema.enum;
} else if (this.fieldSchema.items && this.fieldSchema.items.enum) {
return this.fieldSchema.items.enum;
} else {
return [];
}
}, },
}), }),
); );

View File

@ -453,6 +453,29 @@ function draw(pathway, elem) {
} }
} }
if (predictedPropertyViewEnabled) {
var tempContent = "";
if (Object.keys(n.predicted_properties).length > 0) {
if ("PepperPrediction" in n.predicted_properties) {
// TODO needs to be generic once we store it as AddInf
for (var s of n.predicted_properties["PepperPrediction"]) {
if (s["mean"] != null) {
tempContent += "<b>DT50 predicted via Pepper:</b> " + s["mean"].toFixed(2) + "<br>"
}
}
}
}
if (tempContent === "") {
tempContent = "<b>No predicted properties for this Node</b><br>";
}
popupContent += tempContent
}
popupContent += "<img src='" + n.image + "'><br>" popupContent += "<img src='" + n.image + "'><br>"
if (n.scenarios.length > 0) { if (n.scenarios.length > 0) {
popupContent += '<b>Half-lives and related scenarios:</b><br>' popupContent += '<b>Half-lives and related scenarios:</b><br>'
@ -473,7 +496,6 @@ function draw(pathway, elem) {
popupContent = "<a href='" + e.url + "'>" + e.name + "</a><br><br>"; popupContent = "<a href='" + e.url + "'>" + e.name + "</a><br><br>";
if (e.reaction.rules) { if (e.reaction.rules) {
console.log(e.reaction.rules);
for (var rule of e.reaction.rules) { for (var rule of e.reaction.rules) {
popupContent += "Rule <a href='" + rule.url + "'>" + rule.name + "</a><br>"; popupContent += "Rule <a href='" + rule.url + "'>" + rule.name + "</a><br>";
} }

View File

@ -9,6 +9,39 @@
<input type="hidden" name="job-name" value="batch-predict" /> <input type="hidden" name="job-name" value="batch-predict" />
<fieldset class="flex flex-col gap-4 md:flex-3/4"> <fieldset class="flex flex-col gap-4 md:flex-3/4">
<!-- CSV Upload Section -->
<div class="mb-6 rounded-lg border-2 border-dashed border-base-300 p-6">
<div class="flex flex-col gap-4">
<div
class="flex flex-col gap-3 sm:flex-row sm:items-center sm:justify-between"
>
<div class="flex-1">
<h3 class="text-base font-medium text-base-content mb-1">
Load from CSV
</h3>
<p class="text-sm text-base-content/70">
Upload a CSV file with SMILES and name columns, or insert
manually in the table below
</p>
</div>
<div class="flex-shrink-0">
<input
type="file"
id="csv-file"
accept=".csv,.txt"
class="file-input file-input-bordered file-input-sm w-full sm:w-auto"
/>
</div>
</div>
<div
class="text-xs text-base-content/50 border-t border-base-300 pt-3"
>
<strong>Format:</strong> First column = SMILES, Second column =
Name (headers optional) • Maximum 30 rows
</div>
</div>
</div>
<table class="table table-zebra w-full"> <table class="table table-zebra w-full">
<thead> <thead>
<tr> <tr>
@ -113,10 +146,16 @@
<script> <script>
const tableBody = document.getElementById("smiles-table-body"); const tableBody = document.getElementById("smiles-table-body");
const addRowBtn = document.getElementById("add-row-btn"); const addRowBtn = document.getElementById("add-row-btn");
const csvFileInput = document.getElementById("csv-file");
const form = document.getElementById("smiles-form"); const form = document.getElementById("smiles-form");
const hiddenField = document.getElementById("substrates"); const hiddenField = document.getElementById("substrates");
addRowBtn.addEventListener("click", () => { // Function to create a new table row
function createTableRow(
smilesValue = "",
nameValue = "",
placeholder = true,
) {
const row = document.createElement("tr"); const row = document.createElement("tr");
const tdSmiles = document.createElement("td"); const tdSmiles = document.createElement("td");
@ -125,19 +164,147 @@
const smilesInput = document.createElement("input"); const smilesInput = document.createElement("input");
smilesInput.type = "text"; smilesInput.type = "text";
smilesInput.className = "input input-bordered w-full smiles-input"; smilesInput.className = "input input-bordered w-full smiles-input";
smilesInput.placeholder = "SMILES"; smilesInput.placeholder = placeholder ? "SMILES" : "";
smilesInput.value = smilesValue;
const nameInput = document.createElement("input"); const nameInput = document.createElement("input");
nameInput.type = "text"; nameInput.type = "text";
nameInput.className = "input input-bordered w-full name-input"; nameInput.className = "input input-bordered w-full name-input";
nameInput.placeholder = "Name"; nameInput.placeholder = placeholder ? "Name" : "";
nameInput.value = nameValue;
tdSmiles.appendChild(smilesInput); const smilesLabel = document.createElement("label");
tdName.appendChild(nameInput); smilesLabel.appendChild(smilesInput);
tdSmiles.appendChild(smilesLabel);
const nameLabel = document.createElement("label");
nameLabel.appendChild(nameInput);
tdName.appendChild(nameLabel);
row.appendChild(tdSmiles); row.appendChild(tdSmiles);
row.appendChild(tdName); row.appendChild(tdName);
return row;
}
// Drop every row currently rendered in the SMILES table body.
function clearTable() {
  tableBody.replaceChildren();
}
// Function to populate table from CSV data.
// Rebuilds the SMILES input table from raw CSV text: first column = SMILES,
// remainder of the line = name; at most 30 data rows are loaded.
// Relies on the surrounding script's `tableBody`, `clearTable` and
// `createTableRow`.
function populateTableFromCSV(csvData) {
  const lines = csvData.trim().split("\n");
  const maxRows = 30;

  // Clear existing table
  clearTable();

  // Skip header row if it looks like headers
  // (any first line containing "smiles" or "name", case-insensitive).
  const startIndex =
    lines.length > 0 &&
    (lines[0].toLowerCase().includes("smiles") ||
      lines[0].toLowerCase().includes("name"))
      ? 1
      : 0;

  let rowCount = 0;
  for (let i = startIndex; i < lines.length && rowCount < maxRows; i++) {
    const line = lines[i].trim();
    if (!line) continue;

    // Parse CSV line - split by comma, first part is SMILES, rest is name.
    // NOTE(review): quoted fields are only unwrapped; embedded commas inside
    // a quoted SMILES field are NOT handled.
    const firstCommaIndex = line.indexOf(",");
    let smiles = "";
    let name = "";

    if (firstCommaIndex === -1) {
      // No comma found, treat entire line as SMILES
      smiles = line.trim().replace(/^"(.*)"$/, "$1");
    } else {
      // Split at first comma only
      smiles = line
        .substring(0, firstCommaIndex)
        .trim()
        .replace(/^"(.*)"$/, "$1");
      name = line
        .substring(firstCommaIndex + 1)
        .trim()
        .replace(/^"(.*)"$/, "$1");
    }

    // Skip empty rows
    if (!smiles && !name) continue;

    const row = createTableRow(smiles, name, false);
    tableBody.appendChild(row);
    rowCount++;
  }

  // Add at least one empty row if no data was loaded
  if (rowCount === 0) {
    const row = createTableRow();
    tableBody.appendChild(row);
  }

  // Show success message
  if (rowCount > 0) {
    const message =
      rowCount >= maxRows
        ? `Loaded ${rowCount} rows (maximum reached)`
        : `Loaded ${rowCount} rows from CSV`;

    // Create temporary success notification
    const notification = document.createElement("div");
    notification.className = "alert alert-success mb-4";
    notification.innerHTML = `
      <svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24">
        <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
      </svg>
      <span>${message}</span>
    `;

    // Insert notification before the table
    const tableContainer = document.querySelector("table").parentNode;
    tableContainer.insertBefore(
      notification,
      document.querySelector("table"),
    );

    // Remove notification after 3 seconds
    setTimeout(() => {
      if (notification.parentNode) {
        notification.parentNode.removeChild(notification);
      }
    }, 3000);
  }
}
// Handle CSV file selection: read the chosen file as text and
// fill the SMILES table from its contents.
csvFileInput.addEventListener("change", (evt) => {
  const chosen = evt.target.files[0];
  if (!chosen) {
    return;
  }

  // Only .csv / .txt files are accepted (case-insensitive).
  if (!/\.(csv|txt)$/i.test(chosen.name)) {
    alert("Please select a CSV or TXT file.");
    return;
  }

  const fileReader = new FileReader();
  fileReader.onload = (loadEvt) => {
    try {
      populateTableFromCSV(loadEvt.target.result);
    } catch (error) {
      console.error("Error parsing CSV:", error);
      alert("Error parsing CSV file. Please check the file format.");
    }
  };
  fileReader.readAsText(chosen);
});
// Handle add row button
addRowBtn.addEventListener("click", () => {
const row = createTableRow();
tableBody.appendChild(row); tableBody.appendChild(row);
}); });
@ -154,7 +321,7 @@
const smiles = smilesInputs[i].value.trim(); const smiles = smilesInputs[i].value.trim();
const name = nameInputs[i]?.value.trim() ?? ""; const name = nameInputs[i]?.value.trim() ?? "";
// Skip emtpy rows // Skip empty rows
if (!smiles && !name) { if (!smiles && !name) {
continue; continue;
} }

View File

@ -43,14 +43,12 @@
class="select select-bordered w-full" class="select select-bordered w-full"
:class="{ 'select-error': $store.validationErrors.hasError(fieldName, context) }" :class="{ 'select-error': $store.validationErrors.hasError(fieldName, context) }"
x-model="value" x-model="value"
:multiple="multiple"
> >
<option value="" :selected="!value">Select...</option> <option value="" :selected="!value">Select...</option>
<template x-for="opt in options" :key="opt"> <template x-for="opt in options" :key="opt">
<option <option :value="opt" x-text="opt"></option>
:value="opt"
:selected="value === opt"
x-text="opt"
></option>
</template> </template>
</select> </select>
</template> </template>

View File

@ -5,6 +5,8 @@
isSubmitting: false, isSubmitting: false,
modelType: '', modelType: '',
buildAppDomain: false, buildAppDomain: false,
requiresRulePackages: false,
requiresDataPackages: false,
reset() { reset() {
this.isSubmitting = false; this.isSubmitting = false;
@ -24,6 +26,21 @@
return this.modelType === 'enviformer'; return this.modelType === 'enviformer';
}, },
get showRulePackages() {
console.log(this.requiresRulePackages);
return this.requiresRulePackages;
},
get showDataPackages() {
return this.requiresDataPackages;
},
updateRequirements(event) {
const option = event.target.selectedOptions[0];
this.requiresRulePackages = option.dataset.requires_rule_packages === 'True';
this.requiresDataPackages = option.dataset.requires_data_packages === 'True';
},
submit(formId) { submit(formId) {
const form = document.getElementById(formId); const form = document.getElementById(formId);
if (form && form.checkValidity()) { if (form && form.checkValidity()) {
@ -111,17 +128,24 @@
name="model-type" name="model-type"
class="select select-bordered w-full" class="select select-bordered w-full"
x-model="modelType" x-model="modelType"
x-on:change="updateRequirements($event)"
required required
> >
<option value="" disabled selected>Select Model Type</option> <option value="" disabled selected>Select Model Type</option>
{% for k, v in model_types.items %} {% for k, v in model_types.items %}
<option value="{{ v }}">{{ k }}</option> <option
value="{{ v.type }}"
data-requires_rule_packages="{{ v.requires_rule_packages }}"
data-requires_data_packages="{{ v.requires_data_packages }}"
>
{{ k }}
</option>
{% endfor %} {% endfor %}
</select> </select>
</div> </div>
<!-- Rule Packages (MLRR, RBRR) --> <!-- Rule Packages (MLRR, RBRR) -->
<div class="form-control mb-3" x-show="showMlrr || showRbrr" x-cloak> <div class="form-control mb-3" x-show="showRulePackages" x-cloak>
<label class="label" for="model-rule-packages"> <label class="label" for="model-rule-packages">
<span class="label-text">Rule Packages</span> <span class="label-text">Rule Packages</span>
</label> </label>
@ -152,11 +176,7 @@
</div> </div>
<!-- Data Packages (MLRR, RBRR, Enviformer) --> <!-- Data Packages (MLRR, RBRR, Enviformer) -->
<div <div class="form-control mb-3" x-show="showDataPackages" x-cloak>
class="form-control mb-3"
x-show="showMlrr || showRbrr || showEnviformer"
x-cloak
>
<label class="label" for="model-data-packages"> <label class="label" for="model-data-packages">
<span class="label-text">Data Packages</span> <span class="label-text">Data Packages</span>
</label> </label>

View File

@ -233,6 +233,25 @@
</div> </div>
</div> </div>
{% if property_models %}
<div class="form-control mb-3">
<label class="label" for="prediction-setting-property-models">
<span class="label-text">Select Property Models</span>
</label>
<select
id="prediction-setting-property-models"
name="prediction-setting-property-models"
class="select select-bordered w-full"
multiple
>
<option value="" disabled selected>Select the model</option>
{% for pm in property_models %}
<option value="{{ pm.url }}">{{ pm.name|safe }}</option>
{% endfor %}
</select>
</div>
{% endif %}
<div class="form-control"> <div class="form-control">
<label class="label cursor-pointer justify-start gap-3"> <label class="label cursor-pointer justify-start gap-3">
<input <input

View File

@ -71,24 +71,129 @@
<label class="label"> <label class="label">
<span class="label-text">User or Group</span> <span class="label-text">User or Group</span>
</label> </label>
<select <div
id="select_grantee" class="relative"
name="grantee" x-data="{
class="select select-bordered w-full select-sm" searchQuery: '',
required selectedItem: null,
> showResults: false,
<optgroup label="Users"> filteredResults: [],
allItems: [
{% for u in users %} {% for u in users %}
<option value="{{ u.url }}">{{ u.username }}</option> { type: 'user', name: '{{ u.username }}', url: '{{ u.url }}',
display: '{{ u.username }}' },
{% endfor %} {% endfor %}
</optgroup>
<optgroup label="Groups">
{% for g in groups %} {% for g in groups %}
<option value="{{ g.url }}">{{ g.name|safe }}</option> { type: 'group', name: '{{ g.name|safe }}', url: '{{ g.url }}',
display: '{{ g.name|safe }}' },
{% endfor %} {% endfor %}
</optgroup> ],
</select> init() {
this.filteredResults = this.allItems;
},
search() {
if (this.searchQuery.length === 0) {
this.filteredResults = this.allItems;
} else {
this.filteredResults = this.allItems.filter(item =>
item.name.toLowerCase().includes(this.searchQuery.toLowerCase())
);
}
this.showResults = true;
},
selectItem(item) {
this.selectedItem = item;
this.searchQuery = item.display;
this.showResults = false;
},
clearSelection() {
this.selectedItem = null;
this.searchQuery = '';
this.showResults = false;
}
}"
@click.away="showResults = false"
>
<input
type="text"
x-model="searchQuery"
@input="search()"
@focus="showResults = true; search()"
@keydown.escape="showResults = false"
@keydown.arrow-down.prevent="$refs.resultsList?.children[0]?.focus()"
class="input input-bordered w-full input-sm"
placeholder="Search users or groups..."
autocomplete="off"
required
/>
<!-- Clear button -->
<button
type="button"
x-show="searchQuery.length > 0"
@click="clearSelection()"
class="absolute right-2 top-1/2 transform -translate-y-1/2 text-gray-400 hover:text-gray-600"
>
</button>
<!-- Hidden input for form submission -->
<input
type="hidden"
name="grantee"
x-bind:value="selectedItem?.url || ''"
required
/>
<!-- Search results dropdown -->
<div
x-show="showResults && filteredResults.length > 0"
x-transition
class="absolute z-50 w-full mt-1 bg-base-100 border border-base-300 rounded-lg shadow-lg max-h-60 overflow-y-auto"
>
<ul x-ref="resultsList" id="resultsList" class="py-1">
<template
x-for="(item, index) in filteredResults"
:key="item.url"
>
<li>
<button
type="button"
@click="selectItem(item)"
@keydown.enter="selectItem(item)"
@keydown.escape="showResults = false"
@keydown.arrow-up.prevent="index > 0 ? $event.target.parentElement.previousElementSibling?.children[0]?.focus() : null"
@keydown.arrow-down.prevent="index < filteredResults.length - 1 ? $event.target.parentElement.nextElementSibling?.children[0]?.focus() : null"
class="w-full px-4 py-2 text-left hover:bg-base-200 focus:bg-base-200 focus:outline-none flex items-center space-x-2"
>
<span
x-text="item.type === 'user' ? '👤' : '👥'"
class="text-sm opacity-60"
></span>
<span x-text="item.display"></span>
<span
x-text="item.type === 'user' ? '(User)' : '(Group)'"
class="text-xs opacity-50 ml-auto"
></span>
</button>
</li>
</template>
</ul>
</div> </div>
<!-- No results message -->
<div
x-show="showResults && filteredResults.length === 0 && searchQuery.length > 0"
x-transition
class="absolute z-50 w-full mt-1 bg-base-100 border border-base-300 rounded-lg shadow-lg"
>
<div class="px-4 py-2 text-gray-500 text-sm">
No users or groups found
</div>
</div>
</div>
</div>
<div class="col-span-2 text-center"> <div class="col-span-2 text-center">
<label class="label justify-center"> <label class="label justify-center">
<span class="label-text">Read</span> <span class="label-text">Read</span>

View File

@ -0,0 +1,144 @@
{% extends "framework_modern.html" %}
{% load static %}
{% load envipytags %}
{% block content %}
{% block action_modals %}
{% include "modals/objects/edit_model_modal.html" %}
{% include "modals/objects/evaluate_model_modal.html" %}
{% include "modals/objects/retrain_model_modal.html" %}
{% include "modals/objects/generic_delete_modal.html" %}
{% endblock action_modals %}
{% block libraries %}
{% endblock %}
<div class="space-y-2 p-4">
<!-- Header Section -->
<div class="card bg-base-100">
<div class="card-body">
<div class="flex items-center justify-between">
<h2 class="card-title text-2xl">{{ model.name }}</h2>
<div id="actionsButton" class="dropdown dropdown-end hidden">
<div tabindex="0" role="button" class="btn btn-ghost btn-sm">
<svg
xmlns="http://www.w3.org/2000/svg"
width="16"
height="16"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="lucide lucide-wrench"
>
<path
d="M14.7 6.3a1 1 0 0 0 0 1.4l1.6 1.6a1 1 0 0 0 1.4 0l3.77-3.77a6 6 0 0 1-7.94 7.94l-6.91 6.91a2.12 2.12 0 0 1-3-3l6.91-6.91a6 6 0 0 1 7.94-7.94l-3.76 3.76z"
/>
</svg>
Actions
</div>
<ul
tabindex="-1"
class="dropdown-content menu bg-base-100 rounded-box z-50 w-52 p-2"
>
{% block actions %}
{% include "actions/objects/model.html" %}
{% endblock %}
</ul>
</div>
</div>
<p class="mt-2">{{ model.description|safe }}</p>
</div>
</div>
<!-- Model Status -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked />
<div class="collapse-title text-xl font-medium">Model Status</div>
<div class="collapse-content">{{ model.status }}</div>
</div>
{% block packages %}
{% if model.rule_packages.all|length > 0 %}
<!-- Rule Packages -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked />
<div class="collapse-title text-xl font-medium">Rule Packages</div>
<div class="collapse-content">
<ul class="menu bg-base-100 rounded-box w-full">
{% for p in model.rule_packages.all %}
<li>
<a href="{{ p.url }}" class="hover:bg-base-200"
>{{ p.name }}</a
>
</li>
{% endfor %}
</ul>
</div>
</div>
{% endif %}
{% if model.data_packages.all|length > 0 %}
<!-- Reaction Packages -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked />
<div class="collapse-title text-xl font-medium">
Reaction Packages
</div>
<div class="collapse-content">
<ul class="menu bg-base-100 rounded-box w-full">
{% for p in model.data_packages.all %}
<li>
<a href="{{ p.url }}" class="hover:bg-base-200"
>{{ p.name }}</a
>
</li>
{% endfor %}
</ul>
</div>
</div>
{% endif %}
{% if model.eval_packages.all|length > 0 %}
<!-- Eval Packages -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked />
<div class="collapse-title text-xl font-medium">Eval Packages</div>
<div class="collapse-content">
<ul class="menu bg-base-100 rounded-box w-full">
{% for p in model.eval_packages.all %}
<li>
<a href="{{ p.url }}" class="hover:bg-base-200"
>{{ p.name }}</a
>
</li>
{% endfor %}
</ul>
</div>
</div>
{% endif %}
{% endblock %}
{% block usemodel %}
{% endblock %}
{% block evaluation %}
{% endblock %}
</div>
<script>
// Replace the contents of the first element matching `selector`
// with a loading-indicator <img> pointing at `gifPath`.
// No-op when no element matches.
function makeLoadingGif(selector, gifPath) {
  const target = document.querySelector(selector);
  if (!target) {
    return;
  }
  target.innerHTML = '<img src="' + gifPath + '" alt="Loading...">';
}
// Reset a result container by id: strip the error alert styling
// and empty its contents. No-op when the element does not exist.
function clear(divid) {
  const box = document.getElementById(divid);
  if (!box) {
    return;
  }
  box.classList.remove("alert", "alert-error");
  box.innerHTML = "";
}
</script>
{% endblock content %}

View File

@ -0,0 +1,430 @@
{% extends "objects/model/_model_base.html" %}
{% load static %}
{% load envipytags %}
{% block libraries %}
<!-- Include required libs -->
<script src="https://d3js.org/d3.v5.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/c3@0.7.20/c3.min.js"></script>
<link
href="https://cdn.jsdelivr.net/npm/c3@0.7.20/c3.min.css"
rel="stylesheet"
/>
{% endblock %}
{% block usemodel %}
{% if model.ready_for_prediction %}
<!-- Predict Panel -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked />
<div class="collapse-title text-xl font-medium" id="predictTitle">
Predict
</div>
<div class="collapse-content">
<div class="form-control">
<div class="join w-full">
<input
id="smiles-to-predict"
type="text"
class="input input-bordered join-item grow"
placeholder="CCN(CC)C(=O)C1=CC(=CC=C1)C"
/>
<button
class="btn btn-primary join-item"
type="button"
id="predict-button"
>
Predict!
</button>
</div>
</div>
<div id="predictLoading" class="mt-2 flex hidden justify-center">
<div class="h-8 w-8">
{% include "components/loading-spinner.html" %}
</div>
</div>
<div id="predictResultTable" class="mt-4"></div>
</div>
</div>
{% endif %}
{% if model.ready_for_prediction and model.app_domain %}
<!-- App Domain -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked />
<div class="collapse-title text-xl font-medium">
Applicability Domain Assessment
</div>
<div class="collapse-content">
<div class="form-control">
<div class="join w-full">
<input
id="smiles-to-assess"
type="text"
class="input input-bordered join-item grow"
placeholder="CCN(CC)C(=O)C1=CC(=CC=C1)C"
/>
<button
class="btn btn-primary join-item"
type="button"
id="assess-button"
>
Assess!
</button>
</div>
</div>
<div id="appDomainLoading" class="mt-2 flex hidden justify-center">
<div class="h-8 w-8">
{% include "components/loading-spinner.html" %}
</div>
</div>
<div id="appDomainAssessmentResultTable" class="mt-4"></div>
</div>
</div>
{% endif %}
<script>
// Render a pathway-prediction response as an HTML results table inside
// #predictResultTable.
// `data` is the JSON payload from the model endpoint:
//   - data["stereo"]: truthy when stereochemistry was stripped before prediction
//   - data["pred"]: map of transformation -> {products, probability, btrule}
//     (assumed shape — confirm against the server-side predict view)
function handlePredictionResponse(data) {
  let stereo = data["stereo"];
  data = data["pred"];
  let res = "";
  if (stereo) {
    // Warn the user that the input was predicted without stereochemistry.
    res +=
      "<span class='alert alert-warning alert-soft'>Removed stereochemistry for prediction</span><br>";
  }
  res += "<table class='table table-zebra'>";
  res += "<thead>";
  res += "<th scope='col'>#</th>";
  const columns = ["products", "image", "probability", "btrule"];
  for (const col of columns) {
    res += "<th scope='col'>" + col + "</th>";
  }
  res += "</thead>";
  res += "<tbody>";
  let cnt = 1;
  for (const transformation in data) {
    res += "<tr>";
    res += "<th scope='row'>" + cnt + "</th>";
    // Product SMILES, comma-separated for display.
    res +=
      "<th scope='row'>" +
      data[transformation]["products"][0].join(", ") +
      "</th>";
    // Depiction: products joined with '.' form one multi-component SMILES
    // passed to the Django `depict` view.
    res +=
      "<th scope='row'>" +
      "<img width='400' src='{% url 'depict' %}?smiles=" +
      encodeURIComponent(data[transformation]["products"][0].join(".")) +
      "'></th>";
    res +=
      "<th scope='row'>" +
      data[transformation]["probability"].toFixed(3) +
      "</th>";
    if (data[transformation]["btrule"] != null) {
      // Link to the biotransformation rule that produced this prediction.
      res +=
        "<th scope='row'>" +
        "<a href='" +
        data[transformation]["btrule"]["url"] +
        "' class='link link-primary'>" +
        data[transformation]["btrule"]["name"] +
        "</a>" +
        "</th>";
    } else {
      res += "<th scope='row'>N/A</th>";
    }
    res += "</tr>";
    cnt += 1;
  }
  res += "</tbody>";
  res += "</table>";
  const resultTable = document.getElementById("predictResultTable");
  if (resultTable) {
    resultTable.innerHTML = res;
  }
}
document.addEventListener("DOMContentLoaded", function () {
// Show actions button if there are actions
const actionsButton = document.getElementById("actionsButton");
const actionsList = actionsButton?.querySelector("ul");
if (actionsList && actionsList.children.length > 0) {
actionsButton?.classList.remove("hidden");
}
// Predict button handler
const predictButton = document.getElementById("predict-button");
if (predictButton) {
predictButton.addEventListener("click", function (e) {
e.preventDefault();
clear("predictResultTable");
const smilesInput = document.getElementById("smiles-to-predict");
const smiles = smilesInput ? smilesInput.value.trim() : "";
if (smiles === "") {
const resultTable = document.getElementById("predictResultTable");
if (resultTable) {
resultTable.classList.add("alert", "alert-error");
resultTable.innerHTML =
"Please enter a SMILES string to predict!";
}
return;
}
const loadingEl = document.getElementById("predictLoading");
if (loadingEl) loadingEl.classList.remove("hidden");
const params = new URLSearchParams({
smiles: smiles,
classify: "ILikeCats!",
});
fetch("?" + params.toString(), {
method: "GET",
headers: {
"X-CSRFToken":
document.querySelector("[name=csrf-token]").content,
},
})
.then((response) => {
if (!response.ok) {
return response.json().then((err) => {
throw err;
});
}
return response.json();
})
.then((data) => {
const loadingEl = document.getElementById("predictLoading");
if (loadingEl) loadingEl.classList.add("hidden");
handlePredictionResponse(data);
})
.catch((error) => {
const loadingEl = document.getElementById("predictLoading");
if (loadingEl) loadingEl.classList.add("hidden");
const resultTable = document.getElementById("predictResultTable");
if (resultTable) {
resultTable.classList.add("alert", "alert-error");
resultTable.innerHTML =
error.error || "Error while processing response :/";
}
});
});
}
// Assess button handler
const assessButton = document.getElementById("assess-button");
if (assessButton) {
assessButton.addEventListener("click", function (e) {
e.preventDefault();
clear("appDomainAssessmentResultTable");
const smilesInput = document.getElementById("smiles-to-assess");
const smiles = smilesInput ? smilesInput.value.trim() : "";
if (smiles === "") {
const resultTable = document.getElementById(
"appDomainAssessmentResultTable",
);
if (resultTable) {
resultTable.classList.add("alert", "alert-error");
resultTable.innerHTML =
"Please enter a SMILES string to predict!";
}
return;
}
const loadingEl = document.getElementById("appDomainLoading");
if (loadingEl) loadingEl.classList.remove("hidden");
const params = new URLSearchParams({
smiles: smiles,
"app-domain-assessment": "ILikeCats!",
});
fetch("?" + params.toString(), {
method: "GET",
headers: {
"X-CSRFToken":
document.querySelector("[name=csrf-token]").content,
},
})
.then((response) => {
if (!response.ok) {
return response.json().then((err) => {
throw err;
});
}
return response.json();
})
.then((data) => {
const loadingEl = document.getElementById("appDomainLoading");
if (loadingEl) loadingEl.classList.add("hidden");
if (typeof handleAssessmentResponse === "function") {
handleAssessmentResponse("{% url 'depict' %}", data);
}
console.log(data);
})
.catch((error) => {
const loadingEl = document.getElementById("appDomainLoading");
if (loadingEl) loadingEl.classList.add("hidden");
const resultTable = document.getElementById(
"appDomainAssessmentResultTable",
);
if (resultTable) {
resultTable.classList.add("alert", "alert-error");
resultTable.innerHTML =
error.error || "Error while processing response :/";
}
});
});
}
});
</script>
{% endblock %}
{% block evaluation %}
{# prettier-ignore-start #}
{% if model.model_status == 'FINISHED' %}
<!-- Single Gen Curve Panel -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked/>
<div class="collapse-title text-xl font-medium">
Precision Recall Curve
</div>
<div class="collapse-content">
<div class="flex justify-center">
<div id="sg-chart"></div>
</div>
</div>
</div>
{% if model.multigen_eval %}
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked/>
<div class="collapse-title text-xl font-medium">
Multi Gen Precision Recall Curve
</div>
<div class="collapse-content">
<div class="flex justify-center">
<div id="mg-chart"></div>
</div>
</div>
</div>
{% endif %}
{% endif %}
<script>
// Draw a Precision–Recall curve with c3 (c3/d3 are loaded in the template's
// `libraries` block). `selector` is the bind target (e.g. '#sg-chart');
// `data` is an array of {precision, recall, threshold} points
// (assumed shape — confirm against `model.pr_curve` serialisation).
function makeChart(selector, data) {
  // c3 column format: first entry is the series name, the rest are values.
  const x = ['Recall'];
  const y = ['Precision'];
  const thres = ['threshold'];
  // Comparator: ascending by threshold.
  function compare(a, b) {
    if (a.threshold < b.threshold)
      return -1;
    else if (a.threshold > b.threshold)
      return 1;
    else
      return 0;
  }
  // Linear scan for the first point whose `val_name` field equals `val`;
  // returns its (string) index or -1. Used to map a hovered recall value
  // back to its threshold for the tooltip title.
  function getIndexForValue(data, val, val_name) {
    for (const idx in data) {
      if (data[idx][val_name] == val) {
        return idx;
      }
    }
    return -1;
  }
  if (!data || data.length === 0) {
    console.warn('PR curve data is empty');
    return;
  }
  const dataLength = data.length;
  // NOTE(review): sorts the caller's array in place.
  data.sort(compare);
  for (const idx in data) {
    const d = data[idx];
    x.push(d.recall);
    y.push(d.precision);
    thres.push(d.threshold);
  }
  const chart = c3.generate({
    bindto: selector,
    data: {
      // NOTE(review): `idx`/`thresh` are computed but unused — presumably a
      // leftover hook for threshold selection on point click; confirm intent.
      onclick: function (d, e) {
        const idx = d.index;
        const thresh = data[dataLength - idx - 1].threshold;
      },
      x: 'Recall',
      y: 'Precision',
      columns: [
        x,
        y,
      ]
    },
    size: {
      height: 400,
      width: 480
    },
    axis: {
      x: {
        max: 1,
        min: 0,
        label: 'Recall',
        padding: 0,
        tick: {
          fit: true,
          values: [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
        }
      },
      y: {
        max: 1,
        min: 0,
        label: 'Precision',
        padding: 0,
        tick: {
          fit: true,
          values: [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
        }
      }
    },
    point: {
      r: 4
    },
    tooltip: {
      format: {
        // Tooltip title shows the threshold belonging to the hovered recall.
        title: function (recall) {
          const idx = getIndexForValue(data, recall, "recall");
          if (idx != -1) {
            return "Threshold: " + data[idx].threshold;
          }
          return "";
        },
        // Suppress the per-series value line in the tooltip.
        value: function (precision, ratio, id) {
          return undefined;
        }
      }
    },
    zoom: {
      enabled: true
    }
  });
}
document.addEventListener('DOMContentLoaded', function () {
{% if model.model_status == 'FINISHED' %}
// Precision Recall Curve
makeChart('#sg-chart', {{ model.pr_curve|safe }});
{% if model.multigen_eval %}
// Multi Gen Precision Recall Curve
makeChart('#mg-chart', {{ model.mg_pr_curve|safe }});
{% endif %}
{% endif %}
});
</script>
{# prettier-ignore-end #}
{% endblock %}

View File

@ -0,0 +1,168 @@
{% extends "objects/model/_model_base.html" %}
{% load static %}
{% load envipytags %}
{% block libraries %}
{% endblock %}
{% block usemodel %}
{% if model.ready_for_prediction %}
<!-- Predict Panel -->
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" checked />
<div class="collapse-title text-xl font-medium" id="predictTitle">
Predict
</div>
<div class="collapse-content">
<div class="form-control">
<div class="join w-full">
<input
id="smiles-to-predict"
type="text"
class="input input-bordered join-item grow"
placeholder="CCN(CC)C(=O)C1=CC(=CC=C1)C"
/>
<button
class="btn btn-primary join-item"
type="button"
id="predict-button"
>
Predict!
</button>
</div>
</div>
<div id="predictLoading" class="mt-2 flex hidden justify-center">
<div class="h-8 w-8">
{% include "components/loading-spinner.html" %}
</div>
</div>
<div id="predictResultTable" class="mt-4"></div>
</div>
</div>
{% endif %}
<script>
document.addEventListener("DOMContentLoaded", function () {
// Show actions button if there are actions
const actionsButton = document.getElementById("actionsButton");
const actionsList = actionsButton?.querySelector("ul");
if (actionsList && actionsList.children.length > 0) {
actionsButton?.classList.remove("hidden");
}
// Predict button handler
const predictButton = document.getElementById("predict-button");
if (predictButton) {
predictButton.addEventListener("click", function (e) {
e.preventDefault();
clear("predictResultTable");
const smilesInput = document.getElementById("smiles-to-predict");
const smiles = smilesInput ? smilesInput.value.trim() : "";
if (smiles === "") {
const resultTable = document.getElementById("predictResultTable");
if (resultTable) {
resultTable.classList.add("alert", "alert-error");
resultTable.innerHTML =
"Please enter a SMILES string to predict!";
}
return;
}
const loadingEl = document.getElementById("predictLoading");
if (loadingEl) loadingEl.classList.remove("hidden");
const params = new URLSearchParams({
smiles: smiles,
half_life: "ILikeCats!",
});
fetch("?" + params.toString(), {
method: "GET",
headers: {
"X-CSRFToken":
document.querySelector("[name=csrf-token]").content,
},
})
.then((response) => {
if (!response.ok) {
return response.json().then((err) => {
throw err;
});
}
return response.json();
})
.then((data) => {
const loadingEl = document.getElementById("predictLoading");
if (loadingEl) {
loadingEl.classList.add("hidden");
}
if (data.svg === null) {
document.getElementById("predictResultTable").innerHTML =
"<span class='alert alert-error alert-soft'>Processing failed...</span><br>";
return;
}
handlePredictionResponse(data);
})
.catch((error) => {
const loadingEl = document.getElementById("predictLoading");
if (loadingEl) loadingEl.classList.add("hidden");
const resultTable = document.getElementById("predictResultTable");
if (resultTable) {
resultTable.classList.add("alert", "alert-error");
resultTable.innerHTML =
error.error || "Error while processing response :/";
}
});
});
}
});
// Render a half-life prediction response into #predictResultTable.
// `data` carries:
//   - data["stereo"]: truthy when stereochemistry was stripped before prediction
//   - data["svg"]: server-rendered SVG markup of the result (null on failure,
//     which the caller handles before invoking this function)
function handlePredictionResponse(data) {
  let stereo = data["stereo"];
  data = data["svg"];
  let res = "";
  if (stereo) {
    res +=
      "<span class='alert alert-warning'>Removed stereochemistry for prediction</span><br>";
  }
  // Bug fix: the original emitted "<\div>", which in a JS string is just
  // "<div>", so the centering wrapper was never closed.
  res += "<div class='flex justify-center'>" + data + "</div>";
  const resultTable = document.getElementById("predictResultTable");
  if (resultTable) {
    resultTable.innerHTML = res;
  }
}
</script>
{% endblock %}
{% block evaluation %}
  {% if model.model_status == 'FINISHED' %}
    <!-- Model Statistics Panel: only rendered once the model has reached
         the FINISHED status (i.e. eval_results are available). -->
    <div class="collapse-arrow bg-base-200 collapse">
      <input type="checkbox" checked />
      <div class="collapse-title text-xl font-medium">Model Statistics</div>
      <div class="collapse-content">
        <div class="flex justify-center">
          <div
            id="model-stats"
            class="overflow-x-auto rounded-box shadow-md bg-base-100"
          >
            <table class="table table-fixed w-full">
              <thead class="text-base">
                <tr>
                  <th class="w-1/5">Metric</th>
                  <th>Value</th>
                </tr>
              </thead>
              <tbody>
                <!-- One row per evaluation metric; values rounded to 4 decimals -->
                {% for metric, value in model.eval_results.items %}
                  <tr>
                    <td>{{ metric|upper }}</td>
                    <td>{{ value|floatformat:4 }}</td>
                  </tr>
                {% endfor %}
              </tbody>
            </table>
          </div>
        </div>
      </div>
    </div>
  {% endif %}
{% endblock %}

View File

@ -160,7 +160,7 @@
</div> </div>
<ul <ul
tabindex="0" tabindex="0"
class="dropdown-content menu bg-base-100 rounded-box z-50 w-52 p-2" class="dropdown-content menu bg-base-100 rounded-box z-50 w-60 p-2"
> >
{% if pathway.setting.model.app_domain %} {% if pathway.setting.model.app_domain %}
<li> <li>
@ -206,6 +206,37 @@
OECD 301F View OECD 301F View
</a> </a>
</li> </li>
<li>
<a id="pred-prop-toggle-button" class="cursor-pointer">
<svg
id="pred-prop-icon"
xmlns="http://www.w3.org/2000/svg"
width="16"
height="16"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M8.25 6.75h12M8.25 12h12m-12 5.25h12M3.75 6.75h.007v.008H3.75V6.75Zm.375 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0ZM3.75 12h.007v.008H3.75V12Zm.375 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0Zm-.375 5.25h.007v.008H3.75v-.008Zm.375 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0Z"
/>
</svg>
<svg
class="slash"
viewBox="0 0 100 30"
preserveAspectRatio="none"
aria-hidden="true"
>
<line x1="0" y1="30" x2="100" y2="0" />
</svg>
Show Predicted Properties
</a>
</li>
</ul> </ul>
</div> </div>
</div> </div>
@ -441,6 +472,8 @@
var appDomainViewEnabled = false; var appDomainViewEnabled = false;
// Global switch for timeseries view // Global switch for timeseries view
var timeseriesViewEnabled = false; var timeseriesViewEnabled = false;
// Predicted Property View
var predictedPropertyViewEnabled = false;
function goFullscreen(id) { function goFullscreen(id) {
var element = document.getElementById(id); var element = document.getElementById(id);
@ -563,6 +596,23 @@
}); });
} }
// Predicted Properties toggle
const predPropBtn = document.getElementById("pred-prop-toggle-button");
if (predPropBtn) {
predPropBtn.addEventListener("click", function () {
predictedPropertyViewEnabled = !predictedPropertyViewEnabled;
const icon = document.getElementById("pred-prop-icon");
if (predictedPropertyViewEnabled) {
icon.innerHTML +=
'<svg class="slash" viewBox="0 0 100 30" preserveAspectRatio="none" aria-hidden="true"><line x1="0" y1="30" x2="100" y2="0"/></svg>';
} else {
icon.innerHTML =
'<path stroke-linecap="round" stroke-linejoin="round" d="M8.25 6.75h12M8.25 12h12m-12 5.25h12M3.75 6.75h.007v.008H3.75V6.75Zm.375 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0ZM3.75 12h.007v.008H3.75V12Zm.375 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0Zm-.375 5.25h.007v.008H3.75v-.008Zm.375 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0Z" />';
}
});
}
// Show actions button if there are actions // Show actions button if there are actions
const actionsButton = document.getElementById("actionsButton"); const actionsButton = document.getElementById("actionsButton");
const actionsList = actionsButton?.querySelector("ul"); const actionsList = actionsButton?.querySelector("ul");

View File

@ -123,7 +123,64 @@
</p> </p>
</template> </template>
<template x-for="item in items" :key="item.uuid"> <template
x-for="item in items.filter(i => i.attach_object === null)"
:key="item.uuid"
>
<div class="card bg-base-200 shadow-sm">
<div class="card-body p-4">
<div class="flex items-start justify-between">
<div
class="flex-1"
x-data="schemaRenderer({
rjsf: schemas[item.type.toLowerCase()],
data: item.data,
mode: 'view'
})"
x-init="init()"
>
{% include "components/schema_form.html" %}
</div>
{% if meta.can_edit %}
<button
class="btn btn-sm btn-ghost ml-2"
@click="deleteItem(item.uuid)"
>
<svg
xmlns="http://www.w3.org/2000/svg"
width="16"
height="16"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
class="lucide lucide-trash"
>
<path d="M3 6h18" />
<path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6" />
<path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2" />
</svg>
</button>
{% endif %}
</div>
</div>
</div>
</template>
<template
x-if="items.filter(i => i.attach_object !== null).length !== 0"
>
<h4 class="card-title mb-4 text-lg">
Additional Information that are attached to objects referring
to this Scenario
</h4>
</template>
<template
x-for="item in items.filter(i => i.attach_object !== null)"
:key="item.uuid"
>
<div class="card bg-base-200 shadow-sm"> <div class="card bg-base-200 shadow-sm">
<div class="card-body p-4"> <div class="card-body p-4">
<div class="flex items-start justify-between"> <div class="flex items-start justify-between">
@ -171,82 +228,6 @@
</div> </div>
</div> </div>
{% if scenario.parent %}
<div class="card bg-base-100">
<div class="card-body">
<h3 class="card-title mb-4 text-lg">
Parent Scenario Additional Information
</h3>
<div
x-data="{
items: [],
schemas: {},
loading: true,
error: null,
async init() {
try {
// Use the unified API client for loading data
const { items, schemas } = await window.AdditionalInformationApi.loadSchemasAndItems('{{ scenario.parent.uuid }}');
this.items = items;
this.schemas = schemas;
} catch (err) {
this.error = err.message;
console.error('Error loading additional information:', err);
} finally {
this.loading = false;
}
},
}"
>
<!-- Loading state -->
<template x-if="loading">
<div class="flex items-center justify-center p-4">
<span class="loading loading-spinner loading-md"></span>
</div>
</template>
<!-- Error state -->
<template x-if="error">
<div class="alert alert-error mb-4">
<span x-text="error"></span>
</div>
</template>
<!-- Items list -->
<template x-if="!loading && !error">
<div class="space-y-4">
<template x-if="items.length === 0">
<p class="text-base-content/60">
No additional information available.
</p>
</template>
<template x-for="item in items" :key="item.uuid">
<div class="card bg-base-200 shadow-sm">
<div class="card-body p-4">
<div class="flex items-start justify-between">
<div
class="flex-1"
x-data="schemaRenderer({
rjsf: schemas[item.type.toLowerCase()],
data: item.data,
mode: 'view'
})"
x-init="init()"
>
{% include "components/schema_form.html" %}
</div>
</div>
</div>
</div>
</template>
</div>
</template>
</div>
</div>
</div>
{% endif %}
<!-- Pathways --> <!-- Pathways -->
{% if scenario.related_pathways %} {% if scenario.related_pathways %}
<div class="collapse-arrow bg-base-200 collapse"> <div class="collapse-arrow bg-base-200 collapse">
@ -265,43 +246,6 @@
</div> </div>
</div> </div>
{% endif %} {% endif %}
<!-- Related Scenarios -->
{% if children.exists %}
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" />
<div class="collapse-title text-xl font-medium">Related Scenarios</div>
<div class="collapse-content">
<ul class="menu bg-base-100 rounded-box">
{% for s in children %}
<li>
<a href="{{ s.url }}" class="hover:bg-base-200"
>{{ s.name }} <i>({{ s.package.name }})</i></a
>
</li>
{% endfor %}
</ul>
</div>
</div>
{% endif %}
<!-- Parent Scenarios -->
{% if scenario.parent %}
<div class="collapse-arrow bg-base-200 collapse">
<input type="checkbox" />
<div class="collapse-title text-xl font-medium">Parent Scenario</div>
<div class="collapse-content">
<ul class="menu bg-base-100 rounded-box">
<li>
<a href="{{ scenario.parent.url }}" class="hover:bg-base-200"
>{{ scenario.parent.name }}
<i>({{ scenario.parent.package.name }})</i></a
>
</li>
</ul>
</div>
</div>
{% endif %}
</div> </div>
<script> <script>

View File

@ -87,6 +87,39 @@
<td>Expansion Scheme</td> <td>Expansion Scheme</td>
<td>{{ setting_to_render.expansion_scheme }}</td> <td>{{ setting_to_render.expansion_scheme }}</td>
</tr> </tr>
{% if setting_to_render.property_models.all %}
<tr>
{% for prop_model in setting_to_render.property_models.all %}
<td>Property Models</td>
<td>
<div class="space-y-2">
<table class="table-xs table">
<thead>
<tr>
<th>Property Type</th>
<th>Model</th>
</tr>
</thead>
<tbody>
<tr>
<td>{{ prop_model.instance.display }}</td>
<td>
<a
href="{{ prop_model.url }}"
class="link link-primary"
>
{{ prop_model.name }}
</a>
</td>
</tr>
</tbody>
</table>
</div>
</td>
{% endfor %}
</tr>
{% endif %}
</tbody> </tbody>
</table> </table>
</div> </div>

View File

@ -82,13 +82,13 @@
<div class="form-control"> <div class="form-control">
<label class="label" for="username"> <label class="label" for="username">
<span class="label-text">Username</span> <span class="label-text">Account</span>
</label> </label>
<input <input
type="text" type="text"
id="username" id="username"
name="username" name="username"
placeholder="username" placeholder="Username or Email"
class="input input-bordered w-full" class="input input-bordered w-full"
required required
autocomplete="username" autocomplete="username"
@ -164,6 +164,8 @@
name="username" name="username"
placeholder="username" placeholder="username"
class="input input-bordered w-full" class="input input-bordered w-full"
pattern="^[A-Za-z0-9@.+_\-]{3,150}$"
title="Only letters, numbers, and @ . + - _ are allowed"
required required
autocomplete="username" autocomplete="username"
/> />

View File

@ -58,8 +58,8 @@ class EnviPyStaticLiveServerTestCase(StaticLiveServerTestCase):
def login(self): def login(self):
"""Sign in with the test user, 'user0'""" """Sign in with the test user, 'user0'"""
self.page.goto(self.live_server_url + "/login") self.page.goto(self.live_server_url + "/login")
self.page.get_by_role("textbox", name="Username").click() self.page.get_by_role("textbox", name="Account").click()
self.page.get_by_role("textbox", name="Username").fill(self.username) self.page.get_by_role("textbox", name="Account").fill(self.username)
self.page.get_by_role("textbox", name="Password").click() self.page.get_by_role("textbox", name="Password").click()
self.page.get_by_role("textbox", name="Password").fill(self.password) self.page.get_by_role("textbox", name="Password").fill(self.password)

View File

@ -36,8 +36,8 @@ class TestLoginPage(EnviPyStaticLiveServerTestCase):
u.is_active = True u.is_active = True
u.save() u.save()
page.get_by_role("textbox", name="Username").click() page.get_by_role("textbox", name="Account").click()
page.get_by_role("textbox", name="Username").fill("newuser") page.get_by_role("textbox", name="Account").fill("newuser")
page.get_by_role("textbox", name="Password").click() page.get_by_role("textbox", name="Password").click()
page.get_by_role("textbox", name="Password").fill("NewUser_1") page.get_by_role("textbox", name="Password").fill("NewUser_1")
page.get_by_role("button", name="Sign In").click() page.get_by_role("button", name="Sign In").click()

View File

@ -20,7 +20,16 @@ class TestPackagePage(EnviPyStaticLiveServerTestCase):
page.get_by_role("button", name="Actions").click() page.get_by_role("button", name="Actions").click()
page.get_by_role("button", name="Edit Permissions").click() page.get_by_role("button", name="Edit Permissions").click()
# Add read and write permission to enviPath Users group # Add read and write permission to enviPath Users group
page.locator("#select_grantee").select_option(label="enviPath Users") search_input = page.locator('input[placeholder="Search users or groups..."]')
search_input.fill("enviPath")
# Wait for the results list to appear and be populated
page.wait_for_selector("#resultsList", state="visible")
# Click the first button in the results list
first_button = page.locator("#resultsList button").first
first_button.click()
page.locator("#read_new").check() page.locator("#read_new").check()
page.locator("#write_new").check() page.locator("#write_new").check()
page.get_by_role("button", name="+", exact=True).click() page.get_by_role("button", name="+", exact=True).click()

View File

@ -6,5 +6,5 @@ from utilities.chem import FormatConverter
class FormatConverterTestCase(TestCase): class FormatConverterTestCase(TestCase):
def test_standardization(self): def test_standardization(self):
smiles = "C[n+]1c([N-](C))cccc1" smiles = "C[n+]1c([N-](C))cccc1"
standardized_smiles = FormatConverter.standardize(smiles) standardized_smiles = FormatConverter.standardize(smiles, remove_stereo=True)
self.assertEqual(standardized_smiles, "CN=C1C=CC=CN1C") self.assertEqual(standardized_smiles, "CN=C1C=CC=CN1C")

View File

@ -474,7 +474,7 @@ class FormatConverter(object):
for smi in l_smiles: for smi in l_smiles:
try: try:
smi = FormatConverter.standardize( smi = FormatConverter.standardize(
smi, canonicalize_tautomers=canonicalize_tautomers smi, remove_stereo=True, canonicalize_tautomers=canonicalize_tautomers
) )
except Exception: except Exception:
# :shrug: # :shrug:
@ -488,7 +488,9 @@ class FormatConverter(object):
if standardize: if standardize:
for smi in r_smiles: for smi in r_smiles:
try: try:
smi = FormatConverter.standardize(smi) smi = FormatConverter.standardize(
smi, remove_stereo=True, canonicalize_tautomers=canonicalize_tautomers
)
except Exception: except Exception:
# :shrug: # :shrug:
# logger.debug(f'Standardizing SMILES failed for {smi}') # logger.debug(f'Standardizing SMILES failed for {smi}')

684
utilities/legacy.py Normal file
View File

@ -0,0 +1,684 @@
# legacy counts:
# count property
# 8618 referringscenario
# 6572 halflife
# 5332 acidity
# 5253 temperature
# 5235 spikecompound
# 5096 soiltexture1
# 4939 waterstoragecapacity
# 4783 soiltexture2
# 4750 soilsource
# 4736 redox
# 4681 omcontent
# 4299 cec
# 4101 spikeconcentration
# 3967 humidity
# 3317 soilclassificationsystem
# 3154 biomass
# 2750 minormajor
# 1776 bulkdens
# 1588 initorganism
# 1499 reference
# 1383 enzyme
# 1144 sourcescenario
# 583 confidencelevel
# 477 acidity_ws
# 477 halflife_ws
# 477 samplelocation
# 451 organiccontent
# 368 organiccarbonwater
# 331 rateconstant
# 313 rulelikelihood
# 304 columnheight
# 291 redoxpotential
# 289 oxygencontent
# 276 biomass_ws
# 247 initialmasssediment
# 243 initialvolumewater
# 101 proposedintermediate
# 85 bioreactor
# 85 finalcompoundconcentration
# 75 purposeofwwtp
# 75 typeofaeration
# 74 location
# 68 inoculumsource
# 66 samplingdepth
# 60 originalsludgeamount
# 53 tts
# 52 typeofaddition
# 51 solventforcompoundsolution
# 50 sourceofliquidmatrix
# 43 sludgeretentiontime
# 36 biologicaltreatmenttechnology
# 22 aerationtype
# 16 dissolvedorganiccarbon
# 13 additionofnutrients
# 11 nitrogencontent
# 9 oxygendemand
# 8 phosphoruscontent
# 6 Dissolvedoxygenconcentration
# 4 oxygenuptakerate
# 1 amionauptakerate
# 1 volatiletts
import logging
from envipy_additional_information import HalfLife, HalfLifeWS
from envipy_additional_information.information import Interval
from envipy_additional_information.parsers import (
AcidityParser,
AdditionParser,
AerationTypeParser,
AmmoniaUptakeRateParser,
BiologicalTreatmentTechnologyParser,
BiomassParser,
BioReactorParser,
BulkDensityParser,
CECParser,
ColumnHeightParser,
CompoundSolutionSolventParser,
ConfidenceParser,
DissolvedOrganicCarbonParser,
DissolvedOxygenConcentrationParser,
FinalCompoundConcentrationParser,
HumidityParser,
InitialSedimentMassParser,
InitialVolumeWaterParser,
InoculumSourceParser,
IntervalParser,
LocationParser,
NitrogenContentParser,
NutrientsParser,
OMContentParser,
OrganicCarbonWaterParser,
OrganicContentParser,
OriginalSludgeAmountParser,
OxygenContentParser,
OxygenDemandParser,
PFASManufacturingCategoryParser,
PhosphorusContentParser,
ProposedIntermediateParser,
RateConstantParser,
RedoxParser,
RedoxPotentialParser,
ReferenceParser,
RuleLikelihoodParser,
SampleLocationParser,
SamplingDepthParser,
SludgeRetentionTimeParser,
SoilClassificationParser,
SoilSourceParser,
SoilTexture1Parser,
SoilTexture2Parser,
SpikeCompoundParser,
SpikeConcentrationParser,
TemperatureParser,
TotalOrganicCarbonParser,
TotalSuspendedSolidsParser,
TransformationProductImportanceParser,
VolatileTotalSuspendedSolidsParser,
WaterSedimentAcidityParser,
WaterSedimentBiomassParser,
WaterStorageCapacityParser,
WWTPPurposeParser,
LiquidMatrixSourceParser,
OxygenUptakeRateParser,
InitiatingOrganismParser,
)
logger = logging.getLogger(__name__)
def extract_influent_effluent(request, influentName, effluentName):
    """Join the influent and effluent POST values as ``"influent;effluent"``.

    Missing parameters contribute an empty string on their side of the ``;``.
    """
    parts = (
        get_parameter_or_empty_string(request, influentName),
        get_parameter_or_empty_string(request, effluentName),
    )
    return ";".join(parts)
def get_parameter(request, paramname):
    """Return the non-blank POST parameter *paramname*.

    Raises:
        ValueError: if the parameter is missing or consists only of
            whitespace.

    Bug fix: the original ``return ValueError(...)`` handed the exception
    *object* back to the caller as if it were the parameter value, so the
    ``except ValueError`` handlers wrapped around call sites (e.g. the
    humidity/reference branches) never fired. It must be raised instead.
    """
    res = request.POST.get(paramname)
    if res is not None and res.strip() != "":
        return res
    raise ValueError("Not all parameters are set!")
def get_parameter_or_empty_string(request, paramname):
    """Return the POST parameter *paramname*, or ``""`` when it is absent."""
    value = request.POST.get(paramname, "")
    return value
def extract_range(request, value1, value2):
    """Return ``"start;end"`` built from two POST parameters.

    When exactly one side is blank it is mirrored from the other; when both
    are blank the result is ``";"``.
    """
    bounds = [
        get_parameter_or_empty_string(request, value1),
        get_parameter_or_empty_string(request, value2),
    ]
    if bounds[0] == "":
        bounds[0] = bounds[1]
    if bounds[1] == "":
        bounds[1] = bounds[0]
    return ";".join(bounds)
def build_additional_information_from_request(request, type_):
try:
if type_ == "amionauptakerate":
data = extract_range(request, "amionauptakerateStart", "amionauptakerateEnd")
return AmmoniaUptakeRateParser.from_string(data)
elif type_ == "biomass":
start = get_parameter_or_empty_string(request, "biomassStart")
start = "-1" if start == "" else start
end = get_parameter_or_empty_string(request, "biomassEnd")
end = "-1" if end == "" else end
data = f"{start};{end}"
return BiomassParser.from_string(data)
elif type_ == "bulkdens":
data = get_parameter(request, "bulkdensity")
return BulkDensityParser.from_string(data)
elif type_ == "cec":
data = get_parameter(request, "cecdata")
return CECParser.from_string(data)
elif type_ == "humidity":
# humidity can be just a double or a double - condition pair
exp_humid = get_parameter(request, "expHumid")
try:
hum_conditions = get_parameter(request, "humConditions")
data = f"{exp_humid} - {hum_conditions}"
except ValueError:
data = exp_humid
return HumidityParser.from_string(data)
elif type_ == "omcontent":
value_om = get_parameter_or_empty_string(request, "omcontentInOM")
value_oc = get_parameter_or_empty_string(request, "omcontentINOC")
data = f"{value_om};OM;{value_oc};OC"
return OMContentParser.from_string(data)
elif type_ == "organiccontent":
oc_content = extract_range(request, "OC_content_low", "OC_content_high")
om_content = extract_range(request, "OM_content_low", "OM_content_high")
data = f"{oc_content};{om_content}"
return OrganicContentParser.from_string(data)
elif type_ == "organiccarbonwater":
toc_water = extract_range(request, "TOC_low", "TOC_high")
doc_water = extract_range(request, "DOC_low", "DOC_high")
data = f"{toc_water};{doc_water}"
return OrganicCarbonWaterParser.from_string(data)
elif type_ == "redox":
data = get_parameter(request, "redoxType")
return RedoxParser.from_string(data)
elif type_ == "redoxpotential":
value_range_water = extract_range(request, "lowPotentialWater", "highPotentialWater")
value_range_sediment = extract_range(
request, "lowPotentialSediment", "highPotentialSediment"
)
data = f"{value_range_water};{value_range_sediment}"
return RedoxPotentialParser.from_string(data)
elif type_ == "samplelocation":
data = get_parameter(request, "samplelocation")
return SampleLocationParser.from_string(data)
elif type_ == "samplingdepth":
data = extract_range(request, "samplingDepthMin", "samplingDepthMax")
return SamplingDepthParser.from_string(data)
elif type_ == "initialmasssediment":
initial_mass = get_parameter(request, "initial_mass_sediment")
wet_or_dry = get_parameter(request, "wet_or_dry")
data = f"{initial_mass};{wet_or_dry}"
return InitialSedimentMassParser.from_string(data)
elif type_ == "initialvolumewater":
data = get_parameter(request, "initialvolumewater")
return InitialVolumeWaterParser.from_string(data)
elif type_ == "sedimentporosity":
data = get_parameter(request, "sedimentporosity")
raise ValueError("sedimentporosity is not yet implemented")
elif type_ == "columnheight":
height_sediment = get_parameter_or_empty_string(request, "column_height_sediment")
height_water = get_parameter_or_empty_string(request, "column_height_water")
data = f"{height_sediment};{height_water}"
return ColumnHeightParser.from_string(data)
elif type_ == "oxygencontent":
oxygen_content_water = extract_range(
request, "oxygen_content_water_low", "oxygen_content_water_high"
)
oxygen_content_sediment = extract_range(
request, "oxygen_content_sediment_low", "oxygen_content_sediment_high"
)
data = f"{oxygen_content_water};{oxygen_content_sediment}"
return OxygenContentParser.from_string(data)
elif type_ == "biomass_ws":
biomass_water_cells = extract_range(request, "start_water_cells", "end_water_cells")
biomass_sediment_cells = extract_range(
request, "start_sediment_cells", "end_sediment_cells"
)
biomass_sediment_mg = extract_range(request, "start_sediment_mg", "end_sediment_mg")
data = f"{biomass_water_cells};{biomass_sediment_cells};{biomass_sediment_mg}"
return WaterSedimentBiomassParser.from_string(data)
elif type_ == "soilsource":
data = get_parameter(request, "soilsourcedata")
return SoilSourceParser.from_string(data)
elif type_ == "soiltexture1":
data = get_parameter(request, "soilTextureType")
return SoilTexture1Parser.from_string(data)
elif type_ == "soiltexture2":
sand = get_parameter(request, "sand")
silt = get_parameter(request, "silt")
clay = get_parameter(request, "clay")
data = f"{sand};{silt};{clay}"
return SoilTexture2Parser.from_string(data)
elif type_ == "temperature":
data = extract_range(request, "temperatureMin", "temperatureMax")
return TemperatureParser.from_string(data)
elif type_ == "reference":
try:
data = get_parameter(request, "reference")
except ValueError:
data = get_parameter(request, "pmid")
return ReferenceParser.from_string(data)
elif type_ == "sourcescenario":
# return get_parameter(request, "sourcescenario")
raise ValueError("sourcescenario is not yet implemented")
elif type_ == "acidity":
measurement_methods = get_parameter_or_empty_string(request, "acidityType")
# ACIDITY is separated by " - ". so replace ";" by "-"
value_range = extract_range(request, "lowPh", "highPh").replace(";", " - ")
data = f"{value_range};{measurement_methods}"
return AcidityParser.from_string(data)
elif type_ == "acidity_ws":
measurement_methods_ws = get_parameter_or_empty_string(request, "acidityType")
# ACIDITY is separated by " - ". so replace ";" by "-"
value_range_acidity_water = extract_range(
request, "pH_water_low", "pH_water_high"
).replace(";", " - ")
value_range_acidity_sediment = extract_range(
request, "pH_sediment_low", "pH_sediment_high"
).replace(";", " - ")
data = f"{value_range_acidity_water};{value_range_acidity_sediment};{measurement_methods_ws}"
return WaterSedimentAcidityParser.from_string(data)
elif type_ == "waterstoragecapacity":
wst = get_parameter(request, "wst")
wst_condition = get_parameter(request, "wstConditions")
mwst = get_parameter_or_empty_string(request, "maximumWaterstoragecapacity")
data = f"{wst} - {wst_condition} - {mwst}"
return WaterStorageCapacityParser.from_string(data)
elif type_ == "spikecompound":
if (
request.get_parameter("spikeCompSmiles") is not None
and request.get_parameter("spikeCompSmiles") != ""
):
data = get_parameter(request, "spikeCompSmiles")
elif request.get_parameter("smile") is not None:
data = get_parameter(request, "smile")
else:
data = get_parameter(request, "spikeComp")
return SpikeCompoundParser.from_string(data)
elif type_ == "spikeconcentration":
concentration = get_parameter(request, "spikeConcentration")
unit = get_parameter(request, "spikeconcentrationUnit")
data = f"{concentration};{unit}"
return SpikeConcentrationParser.from_string(data)
elif type_ == "soilclassificationsystem":
data = get_parameter(request, "soilclassificationsystem")
return SoilClassificationParser.from_string(data)
elif type_ == "rulelikelihood":
data = get_parameter(request, "ruleLikelihood")
return RuleLikelihoodParser.from_string(data)
elif type_ == "aerationtype":
data = get_parameter(request, "aerationtype")
return AerationTypeParser.from_string(data)
elif type_ == "bioreactor":
bioreactor_type = get_parameter_or_empty_string(request, "bioreactortype")
bioreactor_size = get_parameter_or_empty_string(request, "bioreactorsize")
data = f"{bioreactor_type};{bioreactor_size}"
return BioReactorParser.from_string(data)
elif type_ == "finalcompoundconcentration":
data = get_parameter(request, "finalcompoundconcentration")
return FinalCompoundConcentrationParser.from_string(data)
elif type_ == "inoculumsource":
data = get_parameter(request, "inoculumsource")
return InoculumSourceParser.from_string(data)
elif type_ == "modelpredictionprob":
data = get_parameter(request, "modelpredictionprob")
raise ValueError("modelpredictionprob is not yet implemented")
elif type_ == "modelbayespredictionprob":
data = get_parameter(request, "modelbayespredictionprob")
raise ValueError("modelbayespredictionprob is not yet implemented")
elif type_ == "additionofnutrients":
data = get_parameter(request, "additionofnutrients")
return NutrientsParser.from_string(data)
elif type_ == "originalsludgeamount":
data = get_parameter(request, "originalsludgeamount")
return OriginalSludgeAmountParser.from_string(data)
elif type_ == "addparametersmeasured":
data = get_parameter(request, "addparametersmeasured")
# return AdditionalMeasuredParameterParser.from_string(data)
raise ValueError("addparametersmeasured is not yet implemented")
elif type_ == "sludgeretentiontime":
# TODO check constraints
sludge_type = get_parameter(request, "sludgeretentiontimeType")
time = get_parameter(request, "sludgeretentiontime")
data = f"{sludge_type};{time}"
return SludgeRetentionTimeParser.from_string(data)
elif type_ == "solventforcompoundsolution":
solvent1 = get_parameter(request, "solventforcompoundsolution1")
try:
solvent2 = get_parameter(request, "solventforcompoundsolution2")
proportion = get_parameter(request, "proportion")
except ValueError:
return solvent1
solvent3 = ""
try:
solvent3 = get_parameter(request, "solventforcompoundsolution3")
except ValueError:
return f"{solvent1};{solvent2};{proportion}"
data = f"{solvent1};{solvent2};{solvent3};{proportion}"
return CompoundSolutionSolventParser.from_string(data)
elif type_ == "tpa":
tpa_name = get_parameter(request, "tpaName")
tpa_ident_level = get_parameter(request, "tpaIdentLevel")
tpa_structure = get_parameter(request, "tpaStructure")
data = f"{tpa_name};{tpa_ident_level};{tpa_structure}"
raise ValueError("tpa is not yet implemented")
elif type_ == "tts":
data = extract_range(request, "ttsStart", "ttsEnd")
return TotalSuspendedSolidsParser.from_string(data)
elif type_ == "typeofaddition":
data = get_parameter(request, "typeofaddition")
return AdditionParser.from_string(data)
elif type_ == "volatiletts":
data = extract_range(request, "volatilettsStart", "volatilettsEnd")
return VolatileTotalSuspendedSolidsParser.from_string(data)
elif type_ == "enzyme":
# name = get_parameter(request, "enzymeName")
# ec_number = get_parameter(request, "enzymeECNumber")
raise ValueError("enzyme is not yet implemented")
elif type_ == "proposedintermediate":
prop = get_parameter(request, "proposed")
return ProposedIntermediateParser.from_string(prop)
elif type_ == "confidencelevel":
data = get_parameter(request, "radioconfidence")
return ConfidenceParser.from_string(data)
elif type_ == "minormajor":
data = get_parameter(request, "radiomin")
return TransformationProductImportanceParser.from_string(data)
elif type_ == "initorganism":
data = get_parameter(request, "organism")
return InitiatingOrganismParser.from_string(data)
elif type_ == "oxygenuptakerate":
data = extract_range(request, "oxygenuptakerateStart", "oxygenuptakerateEnd")
return OxygenUptakeRateParser.from_string(data)
elif type_ == "sourceofliquidmatrix":
data = get_parameter(request, "sourceofliquidmatrix")
return LiquidMatrixSourceParser.from_string(data)
elif type_ == "halflife":
lower = get_parameter(request, "lower")
upper = get_parameter(request, "upper")
i = Interval(start=float(lower), end=float(upper))
comment = get_parameter_or_empty_string(request, "comment")
source = get_parameter_or_empty_string(request, "source")
first_order = get_parameter_or_empty_string(request, "firstOrder")
model = get_parameter_or_empty_string(request, "model")
fit = get_parameter_or_empty_string(request, "fit")
if first_order != "":
if model != "":
raise ValueError("not both, model and firstOrder can be set!")
if first_order == "true":
model = "SFO"
else:
logger.info("firstOrder is set to false which is not meaningful")
return HalfLife(model=model, fit=fit, comment=comment, dt50=i, source=source)
elif type_ == "halflife_ws": # Halflife in water-sediment systems
hl_ws_total = extract_range(request, "total_low", "total_high")
# When no value is given for water and/or sediment DT50,
# extract_range results in two values separated by ";"
# and has to be replaced by empty string
hl_ws_water = extract_range(request, "water_low", "water_high")
if hl_ws_water == ";":
hl_ws_water = ""
else:
hl_ws_water = hl_ws_water.replace(";", " - ")
hl_ws_sediment = extract_range(request, "sediment_low", "sediment_high")
if hl_ws_sediment == ";":
hl_ws_sediment = ""
else:
hl_ws_sediment = hl_ws_sediment.replace(";", " - ")
comment_ws = get_parameter_or_empty_string(request, "comment_ws")
source_ws = get_parameter_or_empty_string(request, "source_ws")
model_ws = get_parameter_or_empty_string(request, "model_ws")
fit_ws = get_parameter_or_empty_string(request, "fit_ws")
dt50_total = IntervalParser.from_string(hl_ws_total)
dt50_sediment = IntervalParser.from_string(hl_ws_sediment)
dt50_water = IntervalParser.from_string(hl_ws_water)
return HalfLifeWS(
model=model_ws,
fit=fit_ws,
comment=comment_ws,
dt50_total=dt50_total,
dt50_water=dt50_water,
dt50_sediment=dt50_sediment,
source=source_ws,
)
elif type_ == "kineticevaluation":
# kinetic_dt50_lower = get_parameter(request, "kineticDt50Lower")
# kinetic_dt50_upper = get_parameter(request, "kineticDt50Upper")
#
# interval = Interval(start=float(kinetic_dt50_lower), end=float(kinetic_dt50_upper))
#
# dt50_string = f"{interval.start} - {interval.end}"
# kinetic_normalized_dt50 = get_parameter_or_empty_string(
# request, "kineticNormalizedDt50"
# )
# kinetic_chi2err = get_parameter_or_empty_string(request, "kineticChi2err")
# kinetic_t_test = get_parameter_or_empty_string(request, "kineticTTest")
# kinetic_swarc = get_parameter_or_empty_string(request, "kineticSwarc")
# kinetic_visual_fit = get_parameter_or_empty_string(request, "kineticVisualFit")
# kinetic_comment = get_parameter_or_empty_string(request, "kinetiCcomment")
# kinetic_source = get_parameter_or_empty_string(request, "kineticSource")
# kinetic_model = get_parameter_or_empty_string(request, "kineticModel")
# kinetic_k1 = get_parameter_or_empty_string(request, "kineticK1")
# kinetic_k2 = get_parameter_or_empty_string(request, "kineticK2")
# kinetic_g = get_parameter_or_empty_string(request, "kineticG")
# kinetic_tb = get_parameter_or_empty_string(request, "kineticTb")
# kinetic_alpha = get_parameter_or_empty_string(request, "kineticAlpha")
# kinetic_beta = get_parameter_or_empty_string(request, "kineticBeta")
raise ValueError("kinetic evaluation is not yet implemented")
elif type_ == "referringscenario":
data = get_parameter(request, "referringscenario")
return ValueError("referringscenario is not yet implemented")
elif type_ == "keggreference":
# kegg_compound = get_parameter(request, "keggCompound")
# kegg_reaction = get_parameter(request, "keggReaction")
# kegg_r_pair = get_parameter(request, "keggRPair")
# kegg_r_class = get_parameter(request, "keggRClass")
# kegg_metabolic_pathway = get_parameter(request, "keggMetabolicPathway")
raise ValueError("kegg reference is not yet implemented")
elif type_ == "totalorganiccarbon":
data = extract_range(request, "totalorganiccarbonStart", "totalorganiccarbonEnd")
return TotalOrganicCarbonParser.from_string(data)
elif type_ == "dissolvedorganiccarbon":
data = extract_range(
request, "dissolvedorganiccarbonStart", "dissolvedorganiccarbonEnd"
)
return DissolvedOrganicCarbonParser.from_string(data)
elif type_ == "purposeofwwtp":
data = get_parameter(request, "purposeofwwtp")
return WWTPPurposeParser.from_string(data)
elif type_ == "biologicaltreatmenttechnology":
data = get_parameter(request, "biologicaltreatmenttechnology")
return BiologicalTreatmentTechnologyParser.from_string(data)
elif type_ == "typeofaeration":
data = get_parameter(request, "typeofaeration")
return AerationTypeParser.from_string(data)
elif type_ == "pfasmanufacturingcategory":
data = get_parameter(request, "pfasmanufacturingcategory")
return PFASManufacturingCategoryParser.from_string(data)
elif type_ == "phosphoruscontent":
data = extract_influent_effluent(
request, "phosphoruscontentInfluent", "phosphoruscontentEffluent"
)
return PhosphorusContentParser.from_string(data)
elif type_ == "oxygendemand":
ox_dem_type = get_parameter(request, "oxygendemandType")
ox_inf_eff = extract_influent_effluent(
request, "oxygendemandInfluent", "oxygendemandEffluent"
)
data = f"{ox_dem_type};{ox_inf_eff}"
return OxygenDemandParser.from_string(data)
elif type_ == "nitrogencontent":
nitrogen_type = get_parameter(request, "nitrogencontentType")
nit_inf_enf = extract_influent_effluent(
request, "nitrogencontentInfluent", "nitrogencontentEffluent"
)
data = f"{nitrogen_type};{nit_inf_enf}"
return NitrogenContentParser.from_string(data)
elif type_ == "location":
data = get_parameter(request, "location")
return LocationParser.from_string(data)
elif type_ == "Dissolvedoxygenconcentration":
data = extract_range(
request, "DissolvedoxygenconcentrationLow", "DissolvedoxygenconcentrationHigh"
)
return DissolvedOxygenConcentrationParser.from_string(data)
elif type_ == "lagphase":
data = get_parameter(request, "lagphase")
raise ValueError("lagphase is not yet implemented")
elif type_ == "rateconstant":
# Order and value has to be set
value = extract_range(request, "rateconstantlower", "rateconstantupper")
if value == ";":
raise ValueError("Rate constant value has to be set.")
order = get_parameter(request, "rateconstantorder")
try:
corrected = get_parameter(request, "rateconstantcorrected")
except ValueError:
corrected = ""
try:
rate_cons_comment = get_parameter(request, "rateconstantcomment")
except ValueError:
rate_cons_comment = "no comment"
# Escape ;
rate_cons_comment = rate_cons_comment.replace(";", "\\;")
data = f"{order};{corrected};{value};{rate_cons_comment}"
return RateConstantParser.from_string(data)
elif type_ == "compoundlabel":
compound_label = get_parameter(request, "compoundlabel")
if compound_label is None or compound_label == "":
raise ValueError("compoundlabel parameter not transmitted or empty")
raise ValueError("compoundlabel is not yet implemented")
elif type_ == "observation":
observation_type = get_parameter_or_empty_string(request, "observationType")
min_occ = get_parameter_or_empty_string(request, "minOcc")
max_occ = get_parameter_or_empty_string(request, "maxOcc")
res = f"{observation_type};"
res += "NA" if min_occ == "" else min_occ
res += ";"
res += "NA" if max_occ == "" else max_occ
raise ValueError("observation is not yet implemented")
elif type_ == "studymoisture":
# moisture = get_parameter(request, "moisture")
raise ValueError("moisture is not yet implemented")
elif type_ == "studywst":
# study_wst_cond = get_parameter(request, "studywstcond")
raise ValueError("studywstcond is not yet implemented")
else:
raise ValueError(f"No corresponding AdditionalInformation for {type_} found!")
except Exception as e:
raise ValueError(
f"cannot build data string for addinf {type_}: {type(e).__name__} - {str(e)}", e
)

View File

@ -24,7 +24,6 @@ from epdb.models import (
Node, Node,
ParallelRule, ParallelRule,
Pathway, Pathway,
PluginModel,
Reaction, Reaction,
Rule, Rule,
RuleBasedRelativeReasoning, RuleBasedRelativeReasoning,
@ -427,8 +426,6 @@ class PackageExporter:
model_dict["model_type"] = "MLRelativeReasoning" model_dict["model_type"] = "MLRelativeReasoning"
elif isinstance(model, EnviFormer): elif isinstance(model, EnviFormer):
model_dict["model_type"] = "EnviFormer" model_dict["model_type"] = "EnviFormer"
elif isinstance(model, PluginModel):
model_dict["model_type"] = "PluginModel"
else: else:
model_dict["model_type"] = "EPModel" model_dict["model_type"] = "EPModel"
@ -952,8 +949,6 @@ class PackageImporter:
model = MLRelativeReasoning.objects.create(**common_fields) model = MLRelativeReasoning.objects.create(**common_fields)
elif model_type == "EnviFormer": elif model_type == "EnviFormer":
model = EnviFormer.objects.create(**common_fields) model = EnviFormer.objects.create(**common_fields)
elif model_type == "PluginModel":
model = PluginModel.objects.create(**common_fields)
else: else:
model = EPModel.objects.create(**common_fields) model = EPModel.objects.create(**common_fields)

View File

@ -45,13 +45,14 @@ def discover_plugins(_cls: Type = None) -> Dict[str, Type]:
plugins = {} plugins = {}
# Load plugins dropped to plugin dir
for entry_point in importlib.metadata.entry_points(group="enviPy_plugins"): for entry_point in importlib.metadata.entry_points(group="enviPy_plugins"):
try: try:
plugin_class = entry_point.load() plugin_class = entry_point.load()
if _cls: if _cls:
if issubclass(plugin_class, _cls): if issubclass(plugin_class, _cls):
instance = plugin_class() instance = plugin_class()
plugins[instance.name()] = instance plugins[instance.identifier()] = instance
else: else:
if ( if (
issubclass(plugin_class, Classifier) issubclass(plugin_class, Classifier)
@ -59,9 +60,17 @@ def discover_plugins(_cls: Type = None) -> Dict[str, Type]:
or issubclass(plugin_class, Property) or issubclass(plugin_class, Property)
): ):
instance = plugin_class() instance = plugin_class()
plugins[instance.name()] = instance plugins[instance.identifier()] = plugin_class
except Exception as e: except Exception as e:
print(f"Error loading plugin {entry_point.name}: {e}") print(f"Error loading plugin {entry_point.name}: {e}")
# load direct modules
for plugin_module in s.BASE_PLUGINS:
module_path, class_name = plugin_module.rsplit(".", 1)
module = importlib.import_module(module_path)
plugin_class = getattr(module, class_name)
instance = plugin_class()
plugins[instance.identifier()] = plugin_class
return plugins return plugins

306
uv.lock generated
View File

@ -377,6 +377,72 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/76/56/6d6872f79d14c0cb02f1646cbb4592eef935857c0951a105874b7b62a0c3/contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f", size = 13277, upload-time = "2021-06-27T06:54:20.972Z" }, { url = "https://files.pythonhosted.org/packages/76/56/6d6872f79d14c0cb02f1646cbb4592eef935857c0951a105874b7b62a0c3/contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f", size = 13277, upload-time = "2021-06-27T06:54:20.972Z" },
] ]
[[package]]
name = "contourpy"
version = "1.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" },
{ url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" },
{ url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" },
{ url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" },
{ url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" },
{ url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" },
{ url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" },
{ url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" },
{ url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" },
{ url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" },
{ url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" },
{ url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" },
{ url = "https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" },
{ url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" },
{ url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" },
{ url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" },
{ url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" },
{ url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" },
{ url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" },
{ url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" },
{ url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" },
{ url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" },
{ url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" },
{ url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" },
{ url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" },
{ url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" },
{ url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" },
{ url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" },
{ url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" },
{ url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" },
{ url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" },
{ url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" },
{ url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" },
{ url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" },
{ url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" },
{ url = "https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" },
{ url = "https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" },
{ url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" },
{ url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" },
{ url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" },
{ url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" },
{ url = "https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" },
{ url = "https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" },
{ url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" },
{ url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" },
{ url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" },
{ url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" },
{ url = "https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" },
{ url = "https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" },
{ url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" },
{ url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" },
{ url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" },
{ url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" },
{ url = "https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" },
{ url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" },
]
[[package]] [[package]]
name = "coverage" name = "coverage"
version = "7.12.0" version = "7.12.0"
@ -507,6 +573,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" }, { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" },
] ]
[[package]]
name = "cycler"
version = "0.12.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" },
]
[[package]] [[package]]
name = "distlib" name = "distlib"
version = "0.4.0" version = "0.4.0"
@ -638,6 +713,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/67/69/2ce28341493e35b7b182e2c0474288a350edf1cb27220517fb8abd4143c9/django_stubs_ext-5.2.6-py3-none-any.whl", hash = "sha256:c3736dcea49666140c92a1471dda83c31e8d5e71dc364ab3cedc7698dbf01cee", size = 9980, upload-time = "2025-10-05T15:30:49.957Z" }, { url = "https://files.pythonhosted.org/packages/67/69/2ce28341493e35b7b182e2c0474288a350edf1cb27220517fb8abd4143c9/django_stubs_ext-5.2.6-py3-none-any.whl", hash = "sha256:c3736dcea49666140c92a1471dda83c31e8d5e71dc364ab3cedc7698dbf01cee", size = 9980, upload-time = "2025-10-05T15:30:49.957Z" },
] ]
[[package]]
name = "emcee"
version = "3.1.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cb/53/1045ee878cb24281387079f8ee4f0ade1622c6aae1ed1fd91a53e4fa5b19/emcee-3.1.6.tar.gz", hash = "sha256:11af4daf6ab8f9ca69681e3c29054665db7bbd87fd4eb8e437d2c3a1248c637d", size = 2871117, upload-time = "2024-04-19T10:03:19.555Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f9/ef/2196b9bf88ffa1bde45853c72df021fbd07a8fa91a0f59a22d14a050dc04/emcee-3.1.6-py2.py3-none-any.whl", hash = "sha256:f2d63752023bdccf744461450e512a5b417ae7d28f18e12acd76a33de87580cb", size = 47351, upload-time = "2024-04-19T10:03:17.522Z" },
]
[[package]] [[package]]
name = "enviformer" name = "enviformer"
version = "0.1.0" version = "0.1.0"
@ -698,6 +785,13 @@ dev = [
ms-login = [ ms-login = [
{ name = "msal" }, { name = "msal" },
] ]
pepper-plugin = [
{ name = "emcee" },
{ name = "matplotlib" },
{ name = "mordredcommunity" },
{ name = "padelpy" },
{ name = "pyyaml" },
]
[package.metadata] [package.metadata]
requires-dist = [ requires-dist = [
@ -711,16 +805,20 @@ requires-dist = [
{ name = "django-oauth-toolkit", specifier = ">=3.0.1" }, { name = "django-oauth-toolkit", specifier = ">=3.0.1" },
{ name = "django-polymorphic", specifier = ">=4.1.0" }, { name = "django-polymorphic", specifier = ">=4.1.0" },
{ name = "django-stubs", marker = "extra == 'dev'", specifier = ">=5.2.4" }, { name = "django-stubs", marker = "extra == 'dev'", specifier = ">=5.2.4" },
{ name = "emcee", marker = "extra == 'pepper-plugin'", specifier = ">=3.1.6" },
{ name = "enviformer", git = "ssh://git@git.envipath.com/enviPath/enviformer.git?rev=v0.1.4" }, { name = "enviformer", git = "ssh://git@git.envipath.com/enviPath/enviformer.git?rev=v0.1.4" },
{ name = "envipy-additional-information", git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git?rev=v0.4.2" }, { name = "envipy-additional-information", git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git?branch=develop" },
{ name = "envipy-ambit", git = "ssh://git@git.envipath.com/enviPath/enviPy-ambit.git" }, { name = "envipy-ambit", git = "ssh://git@git.envipath.com/enviPath/enviPy-ambit.git" },
{ name = "envipy-plugins", git = "ssh://git@git.envipath.com/enviPath/enviPy-plugins.git?rev=v0.1.0" }, { name = "envipy-plugins", git = "ssh://git@git.envipath.com/enviPath/enviPy-plugins.git?rev=v0.1.0" },
{ name = "epam-indigo", specifier = ">=1.30.1" }, { name = "epam-indigo", specifier = ">=1.30.1" },
{ name = "gunicorn", specifier = ">=23.0.0" }, { name = "gunicorn", specifier = ">=23.0.0" },
{ name = "jsonref", specifier = ">=1.1.0" }, { name = "jsonref", specifier = ">=1.1.0" },
{ name = "matplotlib", marker = "extra == 'pepper-plugin'", specifier = ">=3.10.8" },
{ name = "mordredcommunity", marker = "extra == 'pepper-plugin'", specifier = "==2.0.7" },
{ name = "msal", marker = "extra == 'ms-login'", specifier = ">=1.33.0" }, { name = "msal", marker = "extra == 'ms-login'", specifier = ">=1.33.0" },
{ name = "networkx", specifier = ">=3.4.2" }, { name = "networkx", specifier = ">=3.4.2" },
{ name = "nh3", specifier = "==0.3.2" }, { name = "nh3", specifier = "==0.3.2" },
{ name = "padelpy", marker = "extra == 'pepper-plugin'" },
{ name = "poethepoet", marker = "extra == 'dev'", specifier = ">=0.37.0" }, { name = "poethepoet", marker = "extra == 'dev'", specifier = ">=0.37.0" },
{ name = "polars", specifier = "==1.35.1" }, { name = "polars", specifier = "==1.35.1" },
{ name = "pre-commit", marker = "extra == 'dev'", specifier = ">=4.3.0" }, { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=4.3.0" },
@ -729,6 +827,7 @@ requires-dist = [
{ name = "pytest-django", marker = "extra == 'dev'", specifier = ">=4.11.1" }, { name = "pytest-django", marker = "extra == 'dev'", specifier = ">=4.11.1" },
{ name = "pytest-playwright", marker = "extra == 'dev'", specifier = ">=0.7.1" }, { name = "pytest-playwright", marker = "extra == 'dev'", specifier = ">=0.7.1" },
{ name = "python-dotenv", specifier = ">=1.1.0" }, { name = "python-dotenv", specifier = ">=1.1.0" },
{ name = "pyyaml", marker = "extra == 'pepper-plugin'", specifier = ">=6.0.3" },
{ name = "rdkit", specifier = ">=2025.3.2" }, { name = "rdkit", specifier = ">=2025.3.2" },
{ name = "redis", specifier = ">=6.1.0" }, { name = "redis", specifier = ">=6.1.0" },
{ name = "requests", specifier = ">=2.32.3" }, { name = "requests", specifier = ">=2.32.3" },
@ -737,12 +836,12 @@ requires-dist = [
{ name = "sentry-sdk", extras = ["django"], specifier = ">=2.32.0" }, { name = "sentry-sdk", extras = ["django"], specifier = ">=2.32.0" },
{ name = "setuptools", specifier = ">=80.8.0" }, { name = "setuptools", specifier = ">=80.8.0" },
] ]
provides-extras = ["ms-login", "dev"] provides-extras = ["ms-login", "dev", "pepper-plugin"]
[[package]] [[package]]
name = "envipy-additional-information" name = "envipy-additional-information"
version = "0.4.2" version = "0.4.2"
source = { git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git?rev=v0.4.2#03f2c27c48503fbfc393aaa502cfe52298c55f35" } source = { git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git?branch=develop#04f6a01b8c5cd1342464e004e0cfaec9abc13ac5" }
dependencies = [ dependencies = [
{ name = "pydantic" }, { name = "pydantic" },
] ]
@ -782,6 +881,47 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" },
] ]
[[package]]
name = "fonttools"
version = "4.61.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" },
{ url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" },
{ url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" },
{ url = "https://files.pythonhosted.org/packages/6c/44/f3aeac0fa98e7ad527f479e161aca6c3a1e47bb6996b053d45226fe37bf2/fonttools-4.61.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:15acc09befd16a0fb8a8f62bc147e1a82817542d72184acca9ce6e0aeda9fa6d", size = 5004295, upload-time = "2025-12-12T17:30:10.56Z" },
{ url = "https://files.pythonhosted.org/packages/14/e8/7424ced75473983b964d09f6747fa09f054a6d656f60e9ac9324cf40c743/fonttools-4.61.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6bcdf33aec38d16508ce61fd81838f24c83c90a1d1b8c68982857038673d6b8", size = 4944109, upload-time = "2025-12-12T17:30:12.874Z" },
{ url = "https://files.pythonhosted.org/packages/c8/8b/6391b257fa3d0b553d73e778f953a2f0154292a7a7a085e2374b111e5410/fonttools-4.61.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5fade934607a523614726119164ff621e8c30e8fa1ffffbbd358662056ba69f0", size = 5093598, upload-time = "2025-12-12T17:30:15.79Z" },
{ url = "https://files.pythonhosted.org/packages/d9/71/fd2ea96cdc512d92da5678a1c98c267ddd4d8c5130b76d0f7a80f9a9fde8/fonttools-4.61.1-cp312-cp312-win32.whl", hash = "sha256:75da8f28eff26defba42c52986de97b22106cb8f26515b7c22443ebc9c2d3261", size = 2269060, upload-time = "2025-12-12T17:30:18.058Z" },
{ url = "https://files.pythonhosted.org/packages/80/3b/a3e81b71aed5a688e89dfe0e2694b26b78c7d7f39a5ffd8a7d75f54a12a8/fonttools-4.61.1-cp312-cp312-win_amd64.whl", hash = "sha256:497c31ce314219888c0e2fce5ad9178ca83fe5230b01a5006726cdf3ac9f24d9", size = 2319078, upload-time = "2025-12-12T17:30:22.862Z" },
{ url = "https://files.pythonhosted.org/packages/4b/cf/00ba28b0990982530addb8dc3e9e6f2fa9cb5c20df2abdda7baa755e8fe1/fonttools-4.61.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c56c488ab471628ff3bfa80964372fc13504ece601e0d97a78ee74126b2045c", size = 2846454, upload-time = "2025-12-12T17:30:24.938Z" },
{ url = "https://files.pythonhosted.org/packages/5a/ca/468c9a8446a2103ae645d14fee3f610567b7042aba85031c1c65e3ef7471/fonttools-4.61.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc492779501fa723b04d0ab1f5be046797fee17d27700476edc7ee9ae535a61e", size = 2398191, upload-time = "2025-12-12T17:30:27.343Z" },
{ url = "https://files.pythonhosted.org/packages/a3/4b/d67eedaed19def5967fade3297fed8161b25ba94699efc124b14fb68cdbc/fonttools-4.61.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:64102ca87e84261419c3747a0d20f396eb024bdbeb04c2bfb37e2891f5fadcb5", size = 4928410, upload-time = "2025-12-12T17:30:29.771Z" },
{ url = "https://files.pythonhosted.org/packages/b0/8d/6fb3494dfe61a46258cd93d979cf4725ded4eb46c2a4ca35e4490d84daea/fonttools-4.61.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c1b526c8d3f615a7b1867f38a9410849c8f4aef078535742198e942fba0e9bd", size = 4984460, upload-time = "2025-12-12T17:30:32.073Z" },
{ url = "https://files.pythonhosted.org/packages/f7/f1/a47f1d30b3dc00d75e7af762652d4cbc3dff5c2697a0dbd5203c81afd9c3/fonttools-4.61.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:41ed4b5ec103bd306bb68f81dc166e77409e5209443e5773cb4ed837bcc9b0d3", size = 4925800, upload-time = "2025-12-12T17:30:34.339Z" },
{ url = "https://files.pythonhosted.org/packages/a7/01/e6ae64a0981076e8a66906fab01539799546181e32a37a0257b77e4aa88b/fonttools-4.61.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b501c862d4901792adaec7c25b1ecc749e2662543f68bb194c42ba18d6eec98d", size = 5067859, upload-time = "2025-12-12T17:30:36.593Z" },
{ url = "https://files.pythonhosted.org/packages/73/aa/28e40b8d6809a9b5075350a86779163f074d2b617c15d22343fce81918db/fonttools-4.61.1-cp313-cp313-win32.whl", hash = "sha256:4d7092bb38c53bbc78e9255a59158b150bcdc115a1e3b3ce0b5f267dc35dd63c", size = 2267821, upload-time = "2025-12-12T17:30:38.478Z" },
{ url = "https://files.pythonhosted.org/packages/1a/59/453c06d1d83dc0951b69ef692d6b9f1846680342927df54e9a1ca91c6f90/fonttools-4.61.1-cp313-cp313-win_amd64.whl", hash = "sha256:21e7c8d76f62ab13c9472ccf74515ca5b9a761d1bde3265152a6dc58700d895b", size = 2318169, upload-time = "2025-12-12T17:30:40.951Z" },
{ url = "https://files.pythonhosted.org/packages/32/8f/4e7bf82c0cbb738d3c2206c920ca34ca74ef9dabde779030145d28665104/fonttools-4.61.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fff4f534200a04b4a36e7ae3cb74493afe807b517a09e99cb4faa89a34ed6ecd", size = 2846094, upload-time = "2025-12-12T17:30:43.511Z" },
{ url = "https://files.pythonhosted.org/packages/71/09/d44e45d0a4f3a651f23a1e9d42de43bc643cce2971b19e784cc67d823676/fonttools-4.61.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:d9203500f7c63545b4ce3799319fe4d9feb1a1b89b28d3cb5abd11b9dd64147e", size = 2396589, upload-time = "2025-12-12T17:30:45.681Z" },
{ url = "https://files.pythonhosted.org/packages/89/18/58c64cafcf8eb677a99ef593121f719e6dcbdb7d1c594ae5a10d4997ca8a/fonttools-4.61.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa646ecec9528bef693415c79a86e733c70a4965dd938e9a226b0fc64c9d2e6c", size = 4877892, upload-time = "2025-12-12T17:30:47.709Z" },
{ url = "https://files.pythonhosted.org/packages/8a/ec/9e6b38c7ba1e09eb51db849d5450f4c05b7e78481f662c3b79dbde6f3d04/fonttools-4.61.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11f35ad7805edba3aac1a3710d104592df59f4b957e30108ae0ba6c10b11dd75", size = 4972884, upload-time = "2025-12-12T17:30:49.656Z" },
{ url = "https://files.pythonhosted.org/packages/5e/87/b5339da8e0256734ba0dbbf5b6cdebb1dd79b01dc8c270989b7bcd465541/fonttools-4.61.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b931ae8f62db78861b0ff1ac017851764602288575d65b8e8ff1963fed419063", size = 4924405, upload-time = "2025-12-12T17:30:51.735Z" },
{ url = "https://files.pythonhosted.org/packages/0b/47/e3409f1e1e69c073a3a6fd8cb886eb18c0bae0ee13db2c8d5e7f8495e8b7/fonttools-4.61.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b148b56f5de675ee16d45e769e69f87623a4944f7443850bf9a9376e628a89d2", size = 5035553, upload-time = "2025-12-12T17:30:54.823Z" },
{ url = "https://files.pythonhosted.org/packages/bf/b6/1f6600161b1073a984294c6c031e1a56ebf95b6164249eecf30012bb2e38/fonttools-4.61.1-cp314-cp314-win32.whl", hash = "sha256:9b666a475a65f4e839d3d10473fad6d47e0a9db14a2f4a224029c5bfde58ad2c", size = 2271915, upload-time = "2025-12-12T17:30:57.913Z" },
{ url = "https://files.pythonhosted.org/packages/52/7b/91e7b01e37cc8eb0e1f770d08305b3655e4f002fc160fb82b3390eabacf5/fonttools-4.61.1-cp314-cp314-win_amd64.whl", hash = "sha256:4f5686e1fe5fce75d82d93c47a438a25bf0d1319d2843a926f741140b2b16e0c", size = 2323487, upload-time = "2025-12-12T17:30:59.804Z" },
{ url = "https://files.pythonhosted.org/packages/39/5c/908ad78e46c61c3e3ed70c3b58ff82ab48437faf84ec84f109592cabbd9f/fonttools-4.61.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:e76ce097e3c57c4bcb67c5aa24a0ecdbd9f74ea9219997a707a4061fbe2707aa", size = 2929571, upload-time = "2025-12-12T17:31:02.574Z" },
{ url = "https://files.pythonhosted.org/packages/bd/41/975804132c6dea64cdbfbaa59f3518a21c137a10cccf962805b301ac6ab2/fonttools-4.61.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9cfef3ab326780c04d6646f68d4b4742aae222e8b8ea1d627c74e38afcbc9d91", size = 2435317, upload-time = "2025-12-12T17:31:04.974Z" },
{ url = "https://files.pythonhosted.org/packages/b0/5a/aef2a0a8daf1ebaae4cfd83f84186d4a72ee08fd6a8451289fcd03ffa8a4/fonttools-4.61.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a75c301f96db737e1c5ed5fd7d77d9c34466de16095a266509e13da09751bd19", size = 4882124, upload-time = "2025-12-12T17:31:07.456Z" },
{ url = "https://files.pythonhosted.org/packages/80/33/d6db3485b645b81cea538c9d1c9219d5805f0877fda18777add4671c5240/fonttools-4.61.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:91669ccac46bbc1d09e9273546181919064e8df73488ea087dcac3e2968df9ba", size = 5100391, upload-time = "2025-12-12T17:31:09.732Z" },
{ url = "https://files.pythonhosted.org/packages/6c/d6/675ba631454043c75fcf76f0ca5463eac8eb0666ea1d7badae5fea001155/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c33ab3ca9d3ccd581d58e989d67554e42d8d4ded94ab3ade3508455fe70e65f7", size = 4978800, upload-time = "2025-12-12T17:31:11.681Z" },
{ url = "https://files.pythonhosted.org/packages/7f/33/d3ec753d547a8d2bdaedd390d4a814e8d5b45a093d558f025c6b990b554c/fonttools-4.61.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:664c5a68ec406f6b1547946683008576ef8b38275608e1cee6c061828171c118", size = 5006426, upload-time = "2025-12-12T17:31:13.764Z" },
{ url = "https://files.pythonhosted.org/packages/b4/40/cc11f378b561a67bea850ab50063366a0d1dd3f6d0a30ce0f874b0ad5664/fonttools-4.61.1-cp314-cp314t-win32.whl", hash = "sha256:aed04cabe26f30c1647ef0e8fbb207516fd40fe9472e9439695f5c6998e60ac5", size = 2335377, upload-time = "2025-12-12T17:31:16.49Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ff/c9a2b66b39f8628531ea58b320d66d951267c98c6a38684daa8f50fb02f8/fonttools-4.61.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2180f14c141d2f0f3da43f3a81bc8aa4684860f6b0e6f9e165a4831f24e6a23b", size = 2400613, upload-time = "2025-12-12T17:31:18.769Z" },
{ url = "https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" },
]
[[package]] [[package]]
name = "frozenlist" name = "frozenlist"
version = "1.8.0" version = "1.8.0"
@ -1039,6 +1179,78 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789", size = 92520, upload-time = "2024-03-06T19:58:29.765Z" }, { url = "https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789", size = 92520, upload-time = "2024-03-06T19:58:29.765Z" },
] ]
[[package]]
name = "kiwisolver"
version = "1.4.9"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" },
{ url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" },
{ url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" },
{ url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" },
{ url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" },
{ url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" },
{ url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" },
{ url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" },
{ url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" },
{ url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" },
{ url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" },
{ url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" },
{ url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" },
{ url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = "2025-08-10T21:26:26.725Z" },
{ url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" },
{ url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" },
{ url = "https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" },
{ url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" },
{ url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" },
{ url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = "2025-08-10T21:26:34.032Z" },
{ url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" },
{ url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = "2025-08-10T21:26:37.534Z" },
{ url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" },
{ url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" },
{ url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" },
{ url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" },
{ url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" },
{ url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" },
{ url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" },
{ url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" },
{ url = "https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" },
{ url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" },
{ url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = "2025-08-10T21:26:53.592Z" },
{ url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" },
{ url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" },
{ url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" },
{ url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" },
{ url = "https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" },
{ url = "https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806, upload-time = "2025-08-10T21:27:01.537Z" },
{ url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605, upload-time = "2025-08-10T21:27:03.335Z" },
{ url = "https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925, upload-time = "2025-08-10T21:27:04.339Z" },
{ url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414, upload-time = "2025-08-10T21:27:05.437Z" },
{ url = "https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272, upload-time = "2025-08-10T21:27:07.063Z" },
{ url = "https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578, upload-time = "2025-08-10T21:27:08.452Z" },
{ url = "https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607, upload-time = "2025-08-10T21:27:10.125Z" },
{ url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150, upload-time = "2025-08-10T21:27:11.484Z" },
{ url = "https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979, upload-time = "2025-08-10T21:27:12.917Z" },
{ url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456, upload-time = "2025-08-10T21:27:14.353Z" },
{ url = "https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621, upload-time = "2025-08-10T21:27:15.808Z" },
{ url = "https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417, upload-time = "2025-08-10T21:27:17.436Z" },
{ url = "https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582, upload-time = "2025-08-10T21:27:18.436Z" },
{ url = "https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514, upload-time = "2025-08-10T21:27:19.465Z" },
{ url = "https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905, upload-time = "2025-08-10T21:27:20.51Z" },
{ url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399, upload-time = "2025-08-10T21:27:21.496Z" },
{ url = "https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197, upload-time = "2025-08-10T21:27:22.604Z" },
{ url = "https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125, upload-time = "2025-08-10T21:27:24.036Z" },
{ url = "https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612, upload-time = "2025-08-10T21:27:25.773Z" },
{ url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990, upload-time = "2025-08-10T21:27:27.089Z" },
{ url = "https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601, upload-time = "2025-08-10T21:27:29.343Z" },
{ url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041, upload-time = "2025-08-10T21:27:30.754Z" },
{ url = "https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897, upload-time = "2025-08-10T21:27:32.803Z" },
{ url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" },
{ url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" },
{ url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" },
]
[[package]] [[package]]
name = "kombu" name = "kombu"
version = "5.5.4" version = "5.5.4"
@ -1152,6 +1364,76 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
] ]
[[package]]
name = "matplotlib"
version = "3.10.8"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "contourpy" },
{ name = "cycler" },
{ name = "fonttools" },
{ name = "kiwisolver" },
{ name = "numpy" },
{ name = "packaging" },
{ name = "pillow" },
{ name = "pyparsing" },
{ name = "python-dateutil" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" },
{ url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" },
{ url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" },
{ url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" },
{ url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" },
{ url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" },
{ url = "https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" },
{ url = "https://files.pythonhosted.org/packages/3d/b9/15fd5541ef4f5b9a17eefd379356cf12175fe577424e7b1d80676516031a/matplotlib-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3f2e409836d7f5ac2f1c013110a4d50b9f7edc26328c108915f9075d7d7a91b6", size = 8261076, upload-time = "2025-12-10T22:55:44.648Z" },
{ url = "https://files.pythonhosted.org/packages/8d/a0/2ba3473c1b66b9c74dc7107c67e9008cb1782edbe896d4c899d39ae9cf78/matplotlib-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56271f3dac49a88d7fca5060f004d9d22b865f743a12a23b1e937a0be4818ee1", size = 8148794, upload-time = "2025-12-10T22:55:46.252Z" },
{ url = "https://files.pythonhosted.org/packages/75/97/a471f1c3eb1fd6f6c24a31a5858f443891d5127e63a7788678d14e249aea/matplotlib-3.10.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0a7f52498f72f13d4a25ea70f35f4cb60642b466cbb0a9be951b5bc3f45a486", size = 8718474, upload-time = "2025-12-10T22:55:47.864Z" },
{ url = "https://files.pythonhosted.org/packages/01/be/cd478f4b66f48256f42927d0acbcd63a26a893136456cd079c0cc24fbabf/matplotlib-3.10.8-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:646d95230efb9ca614a7a594d4fcacde0ac61d25e37dd51710b36477594963ce", size = 9549637, upload-time = "2025-12-10T22:55:50.048Z" },
{ url = "https://files.pythonhosted.org/packages/5d/7c/8dc289776eae5109e268c4fb92baf870678dc048a25d4ac903683b86d5bf/matplotlib-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f89c151aab2e2e23cb3fe0acad1e8b82841fd265379c4cecd0f3fcb34c15e0f6", size = 9613678, upload-time = "2025-12-10T22:55:52.21Z" },
{ url = "https://files.pythonhosted.org/packages/64/40/37612487cc8a437d4dd261b32ca21fe2d79510fe74af74e1f42becb1bdb8/matplotlib-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:e8ea3e2d4066083e264e75c829078f9e149fa119d27e19acd503de65e0b13149", size = 8142686, upload-time = "2025-12-10T22:55:54.253Z" },
{ url = "https://files.pythonhosted.org/packages/66/52/8d8a8730e968185514680c2a6625943f70269509c3dcfc0dcf7d75928cb8/matplotlib-3.10.8-cp313-cp313-win_arm64.whl", hash = "sha256:c108a1d6fa78a50646029cb6d49808ff0fc1330fda87fa6f6250c6b5369b6645", size = 8012917, upload-time = "2025-12-10T22:55:56.268Z" },
{ url = "https://files.pythonhosted.org/packages/b5/27/51fe26e1062f298af5ef66343d8ef460e090a27fea73036c76c35821df04/matplotlib-3.10.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ad3d9833a64cf48cc4300f2b406c3d0f4f4724a91c0bd5640678a6ba7c102077", size = 8305679, upload-time = "2025-12-10T22:55:57.856Z" },
{ url = "https://files.pythonhosted.org/packages/2c/1e/4de865bc591ac8e3062e835f42dd7fe7a93168d519557837f0e37513f629/matplotlib-3.10.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:eb3823f11823deade26ce3b9f40dcb4a213da7a670013929f31d5f5ed1055b22", size = 8198336, upload-time = "2025-12-10T22:55:59.371Z" },
{ url = "https://files.pythonhosted.org/packages/c6/cb/2f7b6e75fb4dce87ef91f60cac4f6e34f4c145ab036a22318ec837971300/matplotlib-3.10.8-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d9050fee89a89ed57b4fb2c1bfac9a3d0c57a0d55aed95949eedbc42070fea39", size = 8731653, upload-time = "2025-12-10T22:56:01.032Z" },
{ url = "https://files.pythonhosted.org/packages/46/b3/bd9c57d6ba670a37ab31fb87ec3e8691b947134b201f881665b28cc039ff/matplotlib-3.10.8-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b44d07310e404ba95f8c25aa5536f154c0a8ec473303535949e52eb71d0a1565", size = 9561356, upload-time = "2025-12-10T22:56:02.95Z" },
{ url = "https://files.pythonhosted.org/packages/c0/3d/8b94a481456dfc9dfe6e39e93b5ab376e50998cddfd23f4ae3b431708f16/matplotlib-3.10.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0a33deb84c15ede243aead39f77e990469fff93ad1521163305095b77b72ce4a", size = 9614000, upload-time = "2025-12-10T22:56:05.411Z" },
{ url = "https://files.pythonhosted.org/packages/bd/cd/bc06149fe5585ba800b189a6a654a75f1f127e8aab02fd2be10df7fa500c/matplotlib-3.10.8-cp313-cp313t-win_amd64.whl", hash = "sha256:3a48a78d2786784cc2413e57397981fb45c79e968d99656706018d6e62e57958", size = 8220043, upload-time = "2025-12-10T22:56:07.551Z" },
{ url = "https://files.pythonhosted.org/packages/e3/de/b22cf255abec916562cc04eef457c13e58a1990048de0c0c3604d082355e/matplotlib-3.10.8-cp313-cp313t-win_arm64.whl", hash = "sha256:15d30132718972c2c074cd14638c7f4592bd98719e2308bccea40e0538bc0cb5", size = 8062075, upload-time = "2025-12-10T22:56:09.178Z" },
{ url = "https://files.pythonhosted.org/packages/3c/43/9c0ff7a2f11615e516c3b058e1e6e8f9614ddeca53faca06da267c48345d/matplotlib-3.10.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b53285e65d4fa4c86399979e956235deb900be5baa7fc1218ea67fbfaeaadd6f", size = 8262481, upload-time = "2025-12-10T22:56:10.885Z" },
{ url = "https://files.pythonhosted.org/packages/6f/ca/e8ae28649fcdf039fda5ef554b40a95f50592a3c47e6f7270c9561c12b07/matplotlib-3.10.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32f8dce744be5569bebe789e46727946041199030db8aeb2954d26013a0eb26b", size = 8151473, upload-time = "2025-12-10T22:56:12.377Z" },
{ url = "https://files.pythonhosted.org/packages/f1/6f/009d129ae70b75e88cbe7e503a12a4c0670e08ed748a902c2568909e9eb5/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf267add95b1c88300d96ca837833d4112756045364f5c734a2276038dae27d", size = 9553896, upload-time = "2025-12-10T22:56:14.432Z" },
{ url = "https://files.pythonhosted.org/packages/f5/26/4221a741eb97967bc1fd5e4c52b9aa5a91b2f4ec05b59f6def4d820f9df9/matplotlib-3.10.8-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2cf5bd12cecf46908f286d7838b2abc6c91cda506c0445b8223a7c19a00df008", size = 9824193, upload-time = "2025-12-10T22:56:16.29Z" },
{ url = "https://files.pythonhosted.org/packages/1f/f3/3abf75f38605772cf48a9daf5821cd4f563472f38b4b828c6fba6fa6d06e/matplotlib-3.10.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:41703cc95688f2516b480f7f339d8851a6035f18e100ee6a32bc0b8536a12a9c", size = 9615444, upload-time = "2025-12-10T22:56:18.155Z" },
{ url = "https://files.pythonhosted.org/packages/93/a5/de89ac80f10b8dc615807ee1133cd99ac74082581196d4d9590bea10690d/matplotlib-3.10.8-cp314-cp314-win_amd64.whl", hash = "sha256:83d282364ea9f3e52363da262ce32a09dfe241e4080dcedda3c0db059d3c1f11", size = 8272719, upload-time = "2025-12-10T22:56:20.366Z" },
{ url = "https://files.pythonhosted.org/packages/69/ce/b006495c19ccc0a137b48083168a37bd056392dee02f87dba0472f2797fe/matplotlib-3.10.8-cp314-cp314-win_arm64.whl", hash = "sha256:2c1998e92cd5999e295a731bcb2911c75f597d937341f3030cc24ef2733d78a8", size = 8144205, upload-time = "2025-12-10T22:56:22.239Z" },
{ url = "https://files.pythonhosted.org/packages/68/d9/b31116a3a855bd313c6fcdb7226926d59b041f26061c6c5b1be66a08c826/matplotlib-3.10.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b5a2b97dbdc7d4f353ebf343744f1d1f1cca8aa8bfddb4262fcf4306c3761d50", size = 8305785, upload-time = "2025-12-10T22:56:24.218Z" },
{ url = "https://files.pythonhosted.org/packages/1e/90/6effe8103f0272685767ba5f094f453784057072f49b393e3ea178fe70a5/matplotlib-3.10.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3f5c3e4da343bba819f0234186b9004faba952cc420fbc522dc4e103c1985908", size = 8198361, upload-time = "2025-12-10T22:56:26.787Z" },
{ url = "https://files.pythonhosted.org/packages/d7/65/a73188711bea603615fc0baecca1061429ac16940e2385433cc778a9d8e7/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f62550b9a30afde8c1c3ae450e5eb547d579dd69b25c2fc7a1c67f934c1717a", size = 9561357, upload-time = "2025-12-10T22:56:28.953Z" },
{ url = "https://files.pythonhosted.org/packages/f4/3d/b5c5d5d5be8ce63292567f0e2c43dde9953d3ed86ac2de0a72e93c8f07a1/matplotlib-3.10.8-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:495672de149445ec1b772ff2c9ede9b769e3cb4f0d0aa7fa730d7f59e2d4e1c1", size = 9823610, upload-time = "2025-12-10T22:56:31.455Z" },
{ url = "https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" },
{ url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" },
{ url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" },
]
[[package]]
name = "mordredcommunity"
version = "2.0.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "networkx" },
{ name = "numpy" },
{ name = "packaging" },
{ name = "rdkit" },
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9b/cb/953ff6385cd299dab12cbe7bbcd16dbcf778d14499a229b8e882c8b629d3/mordredcommunity-2.0.7.tar.gz", hash = "sha256:6719be351c5fd80461739a4e79acb4480f0c9fb1eb2f7a3ab576c9092e1d74a8", size = 130842, upload-time = "2026-01-22T15:05:10.826Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/28/a5f6bf29558e8eaaac089aa3fcecfee2f8a44b7f4783cd86c5722f3c4530/mordredcommunity-2.0.7-py3-none-any.whl", hash = "sha256:36093d078df9c35419b26ca422a1c7f9ff3693b92ea5e43ea351de245f60020a", size = 176002, upload-time = "2026-01-22T15:05:09.538Z" },
]
[[package]] [[package]]
name = "mpmath" name = "mpmath"
version = "1.3.0" version = "1.3.0"
@ -1573,6 +1855,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
] ]
[[package]]
name = "padelpy"
version = "0.1.16"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5f/19/6af9c17ce403cb372dd7ed75a5e6f00d0a7695ffaccd60584c33e9defebe/padelpy-0.1.16.tar.gz", hash = "sha256:bbe11fd93b3f7914f57b5dafbf83c5070161246fce4c626c1f394d73efabf394", size = 20873861, upload-time = "2023-11-10T22:49:06.075Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/eb/31/75879902fbdd5079a2177845a70c5d5de2915317c1187a83af3a857e70a8/padelpy-0.1.16-py3-none-any.whl", hash = "sha256:fb2814d48c498981c8ba10613e752e6ba856ccbd532aedcdc555154e87abf5b1", size = 20889833, upload-time = "2023-11-10T22:49:02.92Z" },
]
[[package]] [[package]]
name = "pastel" name = "pastel"
version = "0.2.1" version = "0.2.1"
@ -1998,6 +2289,15 @@ crypto = [
{ name = "cryptography" }, { name = "cryptography" },
] ]
[[package]]
name = "pyparsing"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
]
[[package]] [[package]]
name = "pytest" name = "pytest"
version = "8.4.2" version = "8.4.2"