forked from enviPath/enviPy
mrg
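The changes below thread nh3-based input sanitization through the user-facing create and update paths. For registration, create_user takes the strict route: it cleans the submitted username and email and rejects the request outright if cleaning changed anything. A minimal sketch of that check follows; the helper name is illustrative and not part of the commit:

import nh3

def validate_registration_input(username: str, email: str) -> tuple[str, str]:
    # nh3.clean() sanitizes HTML; if it strips or rewrites anything, the
    # input contained markup (or stray whitespace, caught by .strip()).
    clean_username = nh3.clean(username).strip()
    clean_email = nh3.clean(email).strip()
    # Mirror the create_user check: any difference means the request is rejected.
    if clean_username != username or clean_email != email:
        raise ValueError("Invalid username or password")
    return clean_username, clean_email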
@@ -4,6 +4,7 @@ import json
from typing import Union, List, Optional, Set, Dict, Any
from uuid import UUID

import nh3
from django.contrib.auth import get_user_model
from django.db import transaction
from django.conf import settings as s
@@ -185,6 +186,12 @@ class UserManager(object):
def create_user(
username, email, password, set_setting=True, add_to_group=True, *args, **kwargs
):
# Clean for potential XSS
clean_username = nh3.clean(username).strip()
clean_email = nh3.clean(email).strip()
if clean_username != username or clean_email != email:
# This will be caught by the try in view.py/register
raise ValueError("Invalid username or password")
# avoid circular import :S
from .tasks import send_registration_mail

@@ -262,8 +269,9 @@ class GroupManager(object):
@staticmethod
def create_group(current_user, name, description):
g = Group()
g.name = name
g.description = description
# Clean for potential XSS
g.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
g.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()
g.owner = current_user
g.save()

@@ -518,8 +526,13 @@ class PackageManager(object):
@transaction.atomic
def create_package(current_user, name: str, description: str = None):
p = Package()
p.name = name
p.description = description

# Clean for potential XSS
p.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()

if description is not None and description.strip() != "":
p.description = nh3.clean(description.strip(), tags=s.ALLOWED_HTML_TAGS).strip()

p.save()

up = UserPackagePermission()
@@ -1094,28 +1107,29 @@ class SettingManager(object):
model: EPModel = None,
model_threshold: float = None,
):
s = Setting()
s.name = name
s.description = description
s.max_nodes = max_nodes
s.max_depth = max_depth
s.model = model
s.model_threshold = model_threshold
new_s = Setting()
# Clean for potential XSS
new_s.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
new_s.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()
new_s.max_nodes = max_nodes
new_s.max_depth = max_depth
new_s.model = model
new_s.model_threshold = model_threshold

s.save()
new_s.save()

if rule_packages is not None:
for r in rule_packages:
s.rule_packages.add(r)
s.save()
new_s.rule_packages.add(r)
new_s.save()

usp = UserSettingPermission()
usp.user = user
usp.setting = s
usp.setting = new_s
usp.permission = Permission.ALL[0]
usp.save()

return s
return new_s

@staticmethod
def get_default_setting(user: User):
@@ -1542,7 +1556,9 @@ class SPathway(object):
if sub.app_domain_assessment is None:
if self.prediction_setting.model:
if self.prediction_setting.model.app_domain:
app_domain_assessment = self.prediction_setting.model.app_domain.assess(sub.smiles)
app_domain_assessment = self.prediction_setting.model.app_domain.assess(
sub.smiles
)

if self.persist is not None:
n = self.snode_persist_lookup[sub]
@@ -1574,7 +1590,9 @@ class SPathway(object):
app_domain_assessment = None
if self.prediction_setting.model:
if self.prediction_setting.model.app_domain:
app_domain_assessment = (self.prediction_setting.model.app_domain.assess(c))
app_domain_assessment = (
self.prediction_setting.model.app_domain.assess(c)
)

self.smiles_to_node[c] = SNode(
c, sub.depth + 1, app_domain_assessment

epdb/models.py (190 changed lines)
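The epdb/models.py hunks below repeat one naming pattern across the various create() methods (Compound, SimpleAmbitRule, Pathway, the model classes, Scenario): a user-supplied name is sanitized when present, and an auto-numbered default is used when nothing usable remains. A condensed sketch of that pattern, assuming s.ALLOWED_HTML_TAGS is the set of tags the project lets nh3 keep; the helper itself is illustrative, not code from the commit:

import nh3
from django.conf import settings as s

def resolve_display_name(name, default_prefix, queryset, package):
    # Sanitize the name if one was supplied.
    if name is not None:
        name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
    # Fall back to an auto-numbered default when the name is missing or empty.
    if name is None or name == "":
        name = f"{default_prefix} {queryset.filter(package=package).count() + 1}"
    return name

Descriptions follow the same shape, except an empty description is simply left at the model default rather than being numbered.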
@@ -11,6 +11,7 @@ from typing import Union, List, Optional, Dict, Tuple, Set, Any
from uuid import uuid4
import math
import joblib
import nh3
import numpy as np
from django.conf import settings as s
from django.contrib.auth.models import AbstractUser
@@ -28,8 +29,14 @@ from sklearn.metrics import precision_score, recall_score, jaccard_score
from sklearn.model_selection import ShuffleSplit

from utilities.chem import FormatConverter, ProductSet, PredictionResult, IndigoUtils
from utilities.ml import RuleBasedDataset, ApplicabilityDomainPCA, EnsembleClassifierChain, RelativeReasoning, \
EnviFormerDataset, Dataset
from utilities.ml import (
RuleBasedDataset,
ApplicabilityDomainPCA,
EnsembleClassifierChain,
RelativeReasoning,
EnviFormerDataset,
Dataset,
)

logger = logging.getLogger(__name__)

@@ -803,14 +810,16 @@ class Compound(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdentifierMixin
c = Compound()
c.package = package

if name is None or name.strip() == "":
if name is not None:
# Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"Compound {Compound.objects.filter(package=package).count() + 1}"

c.name = name

# We have a default here only set the value if it carries some payload
if description is not None and description.strip() != "":
c.description = description.strip()
c.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

c.save()

@@ -982,11 +991,11 @@ class CompoundStructure(EnviPathModel, AliasMixin, ScenarioMixin, ChemicalIdenti
raise ValueError("Unpersisted Compound! Persist compound first!")

cs = CompoundStructure()
# Clean for potential XSS
if name is not None:
cs.name = name

cs.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if description is not None:
cs.description = description
cs.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

cs.smiles = smiles
cs.compound = compound
@@ -1188,21 +1197,29 @@ class SimpleAmbitRule(SimpleRule):
r = SimpleAmbitRule()
r.package = package

if name is None or name.strip() == "":
if name is not None:
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()

if name is None or name == "":
name = f"Rule {Rule.objects.filter(package=package).count() + 1}"

r.name = name

if description is not None and description.strip() != "":
r.description = description
r.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

r.smirks = smirks

if reactant_filter_smarts is not None and reactant_filter_smarts.strip() != "":
r.reactant_filter_smarts = reactant_filter_smarts
if not FormatConverter.is_valid_smarts(reactant_filter_smarts.strip()):
raise ValueError(f'Reactant Filter SMARTS "{reactant_filter_smarts}" is invalid!')
else:
r.reactant_filter_smarts = reactant_filter_smarts.strip()

if product_filter_smarts is not None and product_filter_smarts.strip() != "":
r.product_filter_smarts = product_filter_smarts
if not FormatConverter.is_valid_smarts(product_filter_smarts.strip()):
raise ValueError(f'Product Filter SMARTS "{product_filter_smarts}" is invalid!')
else:
r.product_filter_smarts = product_filter_smarts.strip()

r.save()
return r
@@ -1403,12 +1420,11 @@ class Reaction(EnviPathModel, AliasMixin, ScenarioMixin, ReactionIdentifierMixin

r = Reaction()
r.package = package

# Clean for potential XSS
if name is not None and name.strip() != "":
r.name = name

r.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if description is not None and name.strip() != "":
r.description = description
r.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

r.multi_step = multi_step

@@ -1716,14 +1732,15 @@ class Pathway(EnviPathModel, AliasMixin, ScenarioMixin):
):
pw = Pathway()
pw.package = package

if name is None or name.strip() == "":
if name is not None:
# Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"Pathway {Pathway.objects.filter(package=package).count() + 1}"

pw.name = name

if description is not None and description.strip() != "":
pw.description = description
pw.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

pw.save()
try:
@@ -2018,11 +2035,16 @@ class Edge(EnviPathModel, AliasMixin, ScenarioMixin):
for node in end_nodes:
e.end_nodes.add(node)

if name is None:
# Clean for potential XSS
# Cleaning technically not needed as it is also done in Reaction.create, including it here for consistency
if name is not None:
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"Reaction {pathway.package.reactions.count() + 1}"

if description is None:
description = s.DEFAULT_VALUES["description"]
description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

r = Reaction.create(
pathway.package,
@@ -2344,7 +2366,9 @@ class PackageBasedModel(EPModel):
eval_reactions = list(
Reaction.objects.filter(package__in=self.eval_packages.all()).distinct()
)
ds = RuleBasedDataset.generate_dataset(eval_reactions, self.applicable_rules, educts_only=True)
ds = RuleBasedDataset.generate_dataset(
eval_reactions, self.applicable_rules, educts_only=True
)
if isinstance(self, RuleBasedRelativeReasoning):
X = ds.X(exclude_id_col=False, na_replacement=None).to_numpy()
y = ds.y(na_replacement=np.nan).to_numpy()
@@ -2542,14 +2566,15 @@ class RuleBasedRelativeReasoning(PackageBasedModel):
):
rbrr = RuleBasedRelativeReasoning()
rbrr.package = package

if name is None or name.strip() == "":
if name is not None:
# Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"RuleBasedRelativeReasoning {RuleBasedRelativeReasoning.objects.filter(package=package).count() + 1}"

rbrr.name = name

if description is not None and description.strip() != "":
rbrr.description = description
rbrr.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

if threshold is None or (threshold <= 0 or 1 <= threshold):
raise ValueError("Threshold must be a float between 0 and 1.")
@@ -2646,14 +2671,15 @@ class MLRelativeReasoning(PackageBasedModel):
):
mlrr = MLRelativeReasoning()
mlrr.package = package

if name is None or name.strip() == "":
if name is not None:
# Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"MLRelativeReasoning {MLRelativeReasoning.objects.filter(package=package).count() + 1}"

mlrr.name = name

if description is not None and description.strip() != "":
mlrr.description = description
mlrr.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

if threshold is None or (threshold <= 0 or 1 <= threshold):
raise ValueError("Threshold must be a float between 0 and 1.")
@@ -2807,7 +2833,9 @@ class ApplicabilityDomain(EnviPathModel):
else:
smiles.append(structures)

assessment_ds, assessment_prods = ds.classification_dataset(structures, self.model.applicable_rules)
assessment_ds, assessment_prods = ds.classification_dataset(
structures, self.model.applicable_rules
)

# qualified_neighbours_per_rule is a nested dictionary structured as:
# {
@@ -2823,12 +2851,16 @@ class ApplicabilityDomain(EnviPathModel):
qualified_neighbours_per_rule: Dict = {}

import polars as pl

# Select only the triggered columns
for i, row in enumerate(assessment_ds[:, assessment_ds.triggered()].iter_rows(named=True)):
# Find the rules the structure triggers. For each rule, filter the training dataset to rows that also
# trigger that rule.
train_trig = {trig_uuid.split("_")[-1]: ds.filter(pl.col(trig_uuid).eq(1))
for trig_uuid, value in row.items() if value == 1}
train_trig = {
trig_uuid.split("_")[-1]: ds.filter(pl.col(trig_uuid).eq(1))
for trig_uuid, value in row.items()
if value == 1
}
qualified_neighbours_per_rule[i] = train_trig
rule_to_i = {str(r.uuid): i for i, r in enumerate(self.model.applicable_rules)}
preds = self.model.combine_products_and_probs(
@@ -2848,18 +2880,28 @@ class ApplicabilityDomain(EnviPathModel):
# loop through rule indices together with the collected neighbours indices from train dataset
for rule_uuid, train_instances in qualified_neighbours_per_rule[i].items():
# compute tanimoto distance for all neighbours and add to dataset
dists = self._compute_distances(assessment_ds[i, assessment_ds.struct_features()].to_numpy()[0],
train_instances[:, train_instances.struct_features()].to_numpy())
dists = self._compute_distances(
assessment_ds[i, assessment_ds.struct_features()].to_numpy()[0],
train_instances[:, train_instances.struct_features()].to_numpy(),
)
train_instances = train_instances.with_columns(dist=pl.Series(dists))

# sort them in a descending way and take at most `self.num_neighbours`
# TODO: Should this be descending? If we want the most similar then we want values close to zero (ascending)
train_instances = train_instances.sort("dist", descending=True)[:self.num_neighbours]
train_instances = train_instances.sort("dist", descending=True)[
: self.num_neighbours
]
# compute average distance
rule_reliabilities[rule_uuid] = train_instances.select(pl.mean("dist")).fill_nan(0.0).item()
rule_reliabilities[rule_uuid] = (
train_instances.select(pl.mean("dist")).fill_nan(0.0).item()
)
# for local_compatibility we'll need the datasets for the indices having the highest similarity
local_compatibilities[rule_uuid] = self._compute_compatibility(rule_uuid, train_instances)
neighbours_per_rule[rule_uuid] = list(CompoundStructure.objects.filter(uuid__in=train_instances["structure_id"]))
local_compatibilities[rule_uuid] = self._compute_compatibility(
rule_uuid, train_instances
)
neighbours_per_rule[rule_uuid] = list(
CompoundStructure.objects.filter(uuid__in=train_instances["structure_id"])
)
neighbor_probs_per_rule[rule_uuid] = train_instances[f"prob_{rule_uuid}"].to_list()

ad_res = {
@@ -2933,8 +2975,11 @@ class ApplicabilityDomain(EnviPathModel):
def _compute_compatibility(self, rule_idx: int, neighbours: "RuleBasedDataset"):
accuracy = 0.0
import polars as pl
obs_pred = neighbours.select(obs=pl.col(f"obs_{rule_idx}").cast(pl.Boolean),
pred=pl.col(f"prob_{rule_idx}") >= self.model.threshold)

obs_pred = neighbours.select(
obs=pl.col(f"obs_{rule_idx}").cast(pl.Boolean),
pred=pl.col(f"prob_{rule_idx}") >= self.model.threshold,
)
# Compute tp, tn, fp, fn using polars expressions
tp = obs_pred.filter((pl.col("obs")) & (pl.col("pred"))).height
tn = obs_pred.filter((~pl.col("obs")) & (~pl.col("pred"))).height
@@ -2961,14 +3006,15 @@ class EnviFormer(PackageBasedModel):
):
mod = EnviFormer()
mod.package = package

if name is None or name.strip() == "":
if name is not None:
# Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"EnviFormer {EnviFormer.objects.filter(package=package).count() + 1}"

mod.name = name

if description is not None and description.strip() != "":
mod.description = description
mod.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

if threshold is None or (threshold <= 0 or 1 <= threshold):
raise ValueError("Threshold must be a float between 0 and 1.")
@@ -3103,7 +3149,7 @@ class EnviFormer(PackageBasedModel):
pred_dict = {}
for k, pred in enumerate(predictions):
pred_smiles, pred_proba = zip(*pred.items())
reactant, true_product = test_ds[k, "educts"], test_ds[k, "products"]
reactant, _ = test_ds[k, "educts"], test_ds[k, "products"]
pred_dict.setdefault(reactant, {"predict": [], "scores": []})
for smiles, proba in zip(pred_smiles, pred_proba):
smiles = set(smiles.split("."))
@@ -3217,8 +3263,9 @@ class EnviFormer(PackageBasedModel):

# If there are eval packages perform single generation evaluation on them instead of random splits
if self.eval_packages.count() > 0:
ds = EnviFormerDataset.generate_dataset(Reaction.objects.filter(
package__in=self.eval_packages.all()).distinct())
ds = EnviFormerDataset.generate_dataset(
Reaction.objects.filter(package__in=self.eval_packages.all()).distinct()
)
test_result = self.model.predict_batch(ds.X())
single_gen_result = evaluate_sg(ds, test_result, self.threshold)
self.eval_results = self.compute_averages([single_gen_result])
@@ -3236,7 +3283,9 @@ class EnviFormer(PackageBasedModel):
train = ds[train_index]
test = ds[test_index]
start = datetime.now()
model = fine_tune(train.X(), train.y(), s.MODEL_DIR, str(split_id), device=s.ENVIFORMER_DEVICE)
model = fine_tune(
train.X(), train.y(), s.MODEL_DIR, str(split_id), device=s.ENVIFORMER_DEVICE
)
end = datetime.now()
logger.debug(
f"EnviFormer finetuning took {(end - start).total_seconds():.2f} seconds"
@@ -3313,7 +3362,12 @@ class EnviFormer(PackageBasedModel):
for pathway in train_pathways:
for reaction in pathway.edges:
reaction = reaction.edge_label
if any([educt in test_educts for educt in reaction_to_educts[str(reaction.uuid)]]):
if any(
[
educt in test_educts
for educt in reaction_to_educts[str(reaction.uuid)]
]
):
overlap += 1
continue
train_reactions.append(reaction)
@@ -3370,41 +3424,44 @@ class Scenario(EnviPathModel):
scenario_type: str,
additional_information: List["EnviPyModel"],
):
s = Scenario()
s.package = package

if name is None or name.strip() == "":
new_s = Scenario()
new_s.package = package
if name is not None:
# Clean for potential XSS
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
if name is None or name == "":
name = f"Scenario {Scenario.objects.filter(package=package).count() + 1}"

s.name = name
new_s.name = name

if description is not None and description.strip() != "":
s.description = description
new_s.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

if scenario_date is not None and scenario_date.strip() != "":
s.scenario_date = scenario_date
new_s.scenario_date = nh3.clean(scenario_date).strip()

if scenario_type is not None and scenario_type.strip() != "":
s.scenario_type = scenario_type
new_s.scenario_type = scenario_type

add_inf = defaultdict(list)

for info in additional_information:
cls_name = info.__class__.__name__
ai_data = json.loads(info.model_dump_json())
# Clean for potential XSS hidden in the additional information fields.
ai_data = json.loads(nh3.clean(info.model_dump_json()).strip())
ai_data["uuid"] = f"{uuid4()}"
add_inf[cls_name].append(ai_data)

s.additional_information = add_inf
new_s.additional_information = add_inf

s.save()
new_s.save()

return s
return new_s

@transaction.atomic
def add_additional_information(self, data: "EnviPyModel"):
cls_name = data.__class__.__name__
ai_data = json.loads(data.model_dump_json())
# Clean for potential XSS hidden in the additional information fields.
ai_data = json.loads(nh3.clean(data.model_dump_json()).strip())
ai_data["uuid"] = f"{uuid4()}"

if cls_name not in self.additional_information:
@@ -3439,7 +3496,8 @@ class Scenario(EnviPathModel):
new_ais = defaultdict(list)
for k, vals in data.items():
for v in vals:
ai_data = json.loads(v.model_dump_json())
# Clean for potential XSS hidden in the additional information fields.
ai_data = json.loads(nh3.clean(v.model_dump_json()).strip())
if hasattr(v, "uuid"):
ai_data["uuid"] = str(v.uuid)
else:

epdb/views.py (115 changed lines)
@@ -10,6 +10,7 @@ from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from envipy_additional_information import NAME_MAPPING
from oauth2_provider.decorators import protected_resource
import nh3

from utilities.chem import FormatConverter, IndigoUtils
from utilities.decorators import package_permission_required
@@ -85,7 +86,10 @@ def login(request):
from django.contrib.auth import authenticate
from django.contrib.auth import login

username = request.POST.get("username")
username = request.POST.get("username").strip()
if username != request.POST.get("username"):
context["message"] = "Login failed!"
return render(request, "static/login.html", context)
password = request.POST.get("password")

# Get email for username and check if the account is active
@@ -673,7 +677,8 @@ def search(request):

if request.method == "GET":
package_urls = request.GET.getlist("packages")
searchterm = request.GET.get("search")
searchterm = request.GET.get("search").strip()

mode = request.GET.get("mode")

# add HTTP_ACCEPT check to differentiate between index and ajax call
@@ -774,7 +779,6 @@ def package_models(request, package_uuid):

elif request.method == "POST":
log_post_params(request)

name = request.POST.get("model-name")
description = request.POST.get("model-description")

@@ -939,8 +943,14 @@ def package_model(request, package_uuid, model_uuid):
else:
return HttpResponseBadRequest()
else:
name = request.POST.get("model-name", "").strip()
description = request.POST.get("model-description", "").strip()
# TODO: Move cleaning to property updater
name = request.POST.get("model-name")
if name is not None:
name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()

description = request.POST.get("model-description")
if description is not None:
description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()

if any([name, description]):
if name:
@@ -1042,8 +1052,16 @@ def package(request, package_uuid):
else:
return HttpResponseBadRequest()

# TODO: Move cleaning to property updater
new_package_name = request.POST.get("package-name")
if new_package_name is not None:
new_package_name = nh3.clean(new_package_name, tags=s.ALLOWED_HTML_TAGS).strip()

new_package_description = request.POST.get("package-description")
if new_package_description is not None:
new_package_description = nh3.clean(
new_package_description, tags=s.ALLOWED_HTML_TAGS
).strip()

grantee_url = request.POST.get("grantee")
read = request.POST.get("read") == "on"
@@ -1152,7 +1170,7 @@ def package_compounds(request, package_uuid):

elif request.method == "POST":
compound_name = request.POST.get("compound-name")
compound_smiles = request.POST.get("compound-smiles")
compound_smiles = request.POST.get("compound-smiles").strip()
compound_description = request.POST.get("compound-description")

c = Compound.create(current_package, compound_smiles, compound_name, compound_description)
@@ -1205,8 +1223,16 @@ def package_compound(request, package_uuid, compound_uuid):

return JsonResponse({"success": current_compound.url})

new_compound_name = request.POST.get("compound-name", "").strip()
new_compound_description = request.POST.get("compound-description", "").strip()
# TODO: Move cleaning to property updater
new_compound_name = request.POST.get("compound-name")
if new_compound_name is not None:
new_compound_name = nh3.clean(new_compound_name, tags=s.ALLOWED_HTML_TAGS).strip()

new_compound_description = request.POST.get("compound-description")
if new_compound_description is not None:
new_compound_description = nh3.clean(
new_compound_description, tags=s.ALLOWED_HTML_TAGS
).strip()

if new_compound_name:
current_compound.name = new_compound_name
@@ -1271,7 +1297,7 @@ def package_compound_structures(request, package_uuid, compound_uuid):

elif request.method == "POST":
structure_name = request.POST.get("structure-name")
structure_smiles = request.POST.get("structure-smiles")
structure_smiles = request.POST.get("structure-smiles").strip()
structure_description = request.POST.get("structure-description")

try:
@@ -1342,8 +1368,16 @@ def package_compound_structure(request, package_uuid, compound_uuid, structure_u
else:
return HttpResponseBadRequest()

new_structure_name = request.POST.get("compound-structure-name", "").strip()
new_structure_description = request.POST.get("compound-structure-description", "").strip()
# TODO: Move cleaning to property updater
new_structure_name = request.POST.get("compound-structure-name")
if new_structure_name is not None:
new_structure_name = nh3.clean(new_structure_name, tags=s.ALLOWED_HTML_TAGS).strip()

new_structure_description = request.POST.get("compound-structure-description")
if new_structure_description is not None:
new_structure_description = nh3.clean(
new_structure_description, tags=s.ALLOWED_HTML_TAGS
).strip()

if new_structure_name:
current_structure.name = new_structure_name
@@ -1445,11 +1479,11 @@ def package_rules(request, package_uuid):

# Obtain parameters as required by rule type
if rule_type == "SimpleAmbitRule":
params["smirks"] = request.POST.get("rule-smirks")
params["smirks"] = request.POST.get("rule-smirks").strip()
params["reactant_filter_smarts"] = request.POST.get("rule-reactant-smarts")
params["product_filter_smarts"] = request.POST.get("rule-product-smarts")
elif rule_type == "SimpleRDKitRule":
params["reaction_smarts"] = request.POST.get("rule-reaction-smarts")
params["reaction_smarts"] = request.POST.get("rule-reaction-smarts").strip()
elif rule_type == "ParallelRule":
pass
elif rule_type == "SequentialRule":
@@ -1550,8 +1584,14 @@ def package_rule(request, package_uuid, rule_uuid):

return JsonResponse({"success": current_rule.url})

rule_name = request.POST.get("rule-name", "").strip()
rule_description = request.POST.get("rule-description", "").strip()
# TODO: Move cleaning to property updater
rule_name = request.POST.get("rule-name")
if rule_name is not None:
rule_name = nh3.clean(rule_name, tags=s.ALLOWED_HTML_TAGS).strip()

rule_description = request.POST.get("rule-description")
if rule_description is not None:
rule_description = nh3.clean(rule_description, tags=s.ALLOWED_HTML_TAGS).strip()

if rule_name:
current_rule.name = rule_name
@@ -1640,8 +1680,8 @@ def package_reactions(request, package_uuid):
elif request.method == "POST":
reaction_name = request.POST.get("reaction-name")
reaction_description = request.POST.get("reaction-description")
reactions_smirks = request.POST.get("reaction-smirks")

reactions_smirks = request.POST.get("reaction-smirks").strip()
educts = reactions_smirks.split(">>")[0].split(".")
products = reactions_smirks.split(">>")[1].split(".")

@@ -1702,8 +1742,16 @@ def package_reaction(request, package_uuid, reaction_uuid):

return JsonResponse({"success": current_reaction.url})

new_reaction_name = request.POST.get("reaction-name", "").strip()
new_reaction_description = request.POST.get("reaction-description", "").strip()
# TODO: Move cleaning to property updater
new_reaction_name = request.POST.get("reaction-name")
if new_reaction_name is not None:
new_reaction_name = nh3.clean(new_reaction_name, tags=s.ALLOWED_HTML_TAGS).strip()

new_reaction_description = request.POST.get("reaction-description")
if new_reaction_description is not None:
new_reaction_description = nh3.clean(
new_reaction_description, tags=s.ALLOWED_HTML_TAGS
).strip()

if new_reaction_name:
current_reaction.name = new_reaction_name
@@ -1780,8 +1828,9 @@ def package_pathways(request, package_uuid):

name = request.POST.get("name")
description = request.POST.get("description")
pw_mode = request.POST.get("predict", "predict").strip()

smiles = request.POST.get("smiles", "").strip()
pw_mode = request.POST.get("predict", "predict").strip()

if "smiles" in request.POST and smiles == "":
return error(
@@ -1790,8 +1839,6 @@ def package_pathways(request, package_uuid):
"Pathway prediction failed due to missing or empty SMILES",
)

smiles = smiles.strip()

try:
stand_smiles = FormatConverter.standardize(smiles)
except ValueError:
@@ -1950,8 +1997,14 @@ def package_pathway(request, package_uuid, pathway_uuid):

return JsonResponse({"success": current_pathway.url})

# TODO: Move cleaning to property updater
pathway_name = request.POST.get("pathway-name")
if pathway_name is not None:
pathway_name = nh3.clean(pathway_name, tags=s.ALLOWED_HTML_TAGS).strip()

pathway_description = request.POST.get("pathway-description")
if pathway_description is not None:
pathway_description = nh3.clean(pathway_description, tags=s.ALLOWED_HTML_TAGS).strip()

if any([pathway_name, pathway_description]):
if pathway_name is not None and pathway_name.strip() != "":
@@ -2039,8 +2092,8 @@ def package_pathway_nodes(request, package_uuid, pathway_uuid):
elif request.method == "POST":
node_name = request.POST.get("node-name")
node_description = request.POST.get("node-description")
node_smiles = request.POST.get("node-smiles")

node_smiles = request.POST.get("node-smiles").strip()
current_pathway.add_node(node_smiles, name=node_name, description=node_description)

return redirect(current_pathway.url)
@@ -2202,9 +2255,9 @@ def package_pathway_edges(request, package_uuid, pathway_uuid):

elif request.method == "POST":
log_post_params(request)

edge_name = request.POST.get("edge-name")
edge_description = request.POST.get("edge-description")

edge_substrates = request.POST.getlist("edge-substrates")
edge_products = request.POST.getlist("edge-products")

@@ -2291,7 +2344,7 @@ def package_scenarios(request, package_uuid):
"all", False
):
scens = Scenario.objects.filter(package=current_package).order_by("name")
res = [{"name": s.name, "url": s.url, "uuid": s.uuid} for s in scens]
res = [{"name": s_.name, "url": s_.url, "uuid": s_.uuid} for s_ in scens]
return JsonResponse(res, safe=False)

context = get_base_context(request)
@@ -2339,21 +2392,21 @@ def package_scenarios(request, package_uuid):
"name": "soil",
"widgets": [
HTMLGenerator.generate_html(ai, prefix=f"soil_{0}")
for ai in [x for s in SOIL_ADDITIONAL_INFORMATION.values() for x in s]
for ai in [x for sv in SOIL_ADDITIONAL_INFORMATION.values() for x in sv]
],
},
"Sludge Data": {
"name": "sludge",
"widgets": [
HTMLGenerator.generate_html(ai, prefix=f"sludge_{0}")
for ai in [x for s in SLUDGE_ADDITIONAL_INFORMATION.values() for x in s]
for ai in [x for sv in SLUDGE_ADDITIONAL_INFORMATION.values() for x in sv]
],
},
"Water-Sediment System Data": {
"name": "sediment",
"widgets": [
HTMLGenerator.generate_html(ai, prefix=f"sediment_{0}")
for ai in [x for s in SEDIMENT_ADDITIONAL_INFORMATION.values() for x in s]
for ai in [x for sv in SEDIMENT_ADDITIONAL_INFORMATION.values() for x in sv]
],
},
}
@@ -2368,6 +2421,7 @@ def package_scenarios(request, package_uuid):

scenario_name = request.POST.get("scenario-name")
scenario_description = request.POST.get("scenario-description")

scenario_date_year = request.POST.get("scenario-date-year")
scenario_date_month = request.POST.get("scenario-date-month")
scenario_date_day = request.POST.get("scenario-date-day")
@@ -2381,9 +2435,9 @@ def package_scenarios(request, package_uuid):
scenario_type = request.POST.get("scenario-type")

additional_information = HTMLGenerator.build_models(request.POST.dict())
additional_information = [x for s in additional_information.values() for x in s]
additional_information = [x for sv in additional_information.values() for x in sv]

s = Scenario.create(
new_scen = Scenario.create(
current_package,
name=scenario_name,
description=scenario_description,
@@ -2392,7 +2446,7 @@ def package_scenarios(request, package_uuid):
additional_information=additional_information,
)

return redirect(s.url)
return redirect(new_scen.url)
else:
return HttpResponseNotAllowed(
[
@@ -2692,6 +2746,7 @@ def settings(request):

name = request.POST.get("prediction-setting-name")
description = request.POST.get("prediction-setting-description")

new_default = request.POST.get("prediction-setting-new-default", "off") == "on"

max_nodes = min(