forked from enviPath/enviPy
Compare commits
64 Commits
enhancemen
...
devbayer
| Author | SHA1 | Date | |
|---|---|---|---|
| 93390b349b | |||
| 58ab5b33e3 | |||
| 73f0202267 | |||
| 27c5bad9c5 | |||
| 5789f20e7f | |||
| c0cfdb9255 | |||
| 5da8dbc191 | |||
| dc18b73e08 | |||
| d80dfb5ee3 | |||
| 9f63a9d4de | |||
| 5565b9cb9e | |||
| ab0b5a5186 | |||
| f905bf21cf | |||
| 1fd993927c | |||
| 2a2fe4f147 | |||
| 5f5ae76182 | |||
| 1c2f70b3b9 | |||
| 54f8302104 | |||
| 6499a0c659 | |||
| 7c60a28801 | |||
| a4a4179261 | |||
| 6ee4ac535a | |||
| d6065ee888 | |||
| 9db4806d75 | |||
| 4bf20e62ef | |||
| 8adb93012a | |||
| d2d475b990 | |||
| 648ec150a9 | |||
| 46b0f1c124 | |||
| d5af898053 | |||
| b7379b3337 | |||
| d6440f416c | |||
| 901de4640c | |||
| 69df139256 | |||
| e8ae494c16 | |||
| fd2e2c2534 | |||
| 1a2c9bb543 | |||
| 7f6f209b4a | |||
| b6c35fea76 | |||
| fa8a191383 | |||
| 67b1baa5b0 | |||
| 89c194dcca | |||
| a8554c903c | |||
| d584791ee8 | |||
| e60052b05c | |||
| 3ff8d938d6 | |||
| a7f48c2cf9 | |||
| 39faab3d11 | |||
| 4e80cd63cd | |||
| 6592f0a68e | |||
| 21d30a923f | |||
| 12a20756d6 | |||
| d20a705011 | |||
| debbef8158 | |||
| 2799718951 | |||
| 305fdc41fb | |||
| 9deca8867e | |||
| df6056fb86 | |||
| c1553d9cd4 | |||
| 2b79adc2f7 | |||
| ddf1fd3515 | |||
| 34589efbde | |||
| 1cccefa991 | |||
| e26d5a21e3 |
@ -20,3 +20,16 @@ LOG_LEVEL='INFO'
|
||||
SERVER_URL='http://localhost:8000'
|
||||
PLUGINS_ENABLED=True
|
||||
EP_DATA_DIR='data'
|
||||
EMAIL_HOST_USER='admin@envipath.com'
|
||||
EMAIL_HOST_PASSWORD='dummy-password'
|
||||
|
||||
DEFAULT_FROM_EMAIL="test@test.com"
|
||||
SERVER_EMAIL='test@test.com'
|
||||
|
||||
# Testing settings VScode
|
||||
DJANGO_SETTINGS_MODULE='envipath.settings'
|
||||
MANAGE_PY_PATH='./manage.py'
|
||||
|
||||
APPLICABILITY_DOMAIN_ENABLED=True
|
||||
ENVIFORMER_PRESENT=True
|
||||
MODEL_BUILDING_ENABLED=True
|
||||
|
||||
67
.gitea/actions/setup-envipy/action.yaml
Normal file
67
.gitea/actions/setup-envipy/action.yaml
Normal file
@ -0,0 +1,67 @@
|
||||
name: 'Setup enviPy Environment'
|
||||
description: 'Shared setup for enviPy CI - installs dependencies and prepares environment'
|
||||
|
||||
inputs:
|
||||
skip-frontend:
|
||||
description: 'Skip frontend build steps (pnpm, tailwind)'
|
||||
required: false
|
||||
default: 'false'
|
||||
skip-playwright:
|
||||
description: 'Skip Playwright installation'
|
||||
required: false
|
||||
default: 'false'
|
||||
ssh-private-key:
|
||||
description: 'SSH private key for git access'
|
||||
required: true
|
||||
run-migrations:
|
||||
description: 'Run Django migrations after setup'
|
||||
required: false
|
||||
default: 'true'
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Setup ssh
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ~/.ssh
|
||||
echo "${{ inputs.ssh-private-key }}" > ~/.ssh/id_ed25519
|
||||
chmod 600 ~/.ssh/id_ed25519
|
||||
ssh-keyscan git.envipath.com >> ~/.ssh/known_hosts
|
||||
eval $(ssh-agent -s)
|
||||
ssh-add ~/.ssh/id_ed25519
|
||||
|
||||
- name: Setup Python venv
|
||||
shell: bash
|
||||
run: |
|
||||
uv sync --locked --all-extras --dev
|
||||
|
||||
- name: Install Playwright
|
||||
if: inputs.skip-playwright == 'false'
|
||||
shell: bash
|
||||
run: |
|
||||
source .venv/bin/activate
|
||||
playwright install --with-deps
|
||||
|
||||
- name: Build Frontend
|
||||
if: inputs.skip-frontend == 'false'
|
||||
shell: bash
|
||||
run: |
|
||||
uv run python scripts/pnpm_wrapper.py install
|
||||
uv run python scripts/pnpm_wrapper.py run build
|
||||
|
||||
- name: Wait for Postgres
|
||||
shell: bash
|
||||
run: |
|
||||
until pg_isready -h postgres -U ${{ env.POSTGRES_USER }}; do
|
||||
echo "Waiting for postgres..."
|
||||
sleep 2
|
||||
done
|
||||
echo "Postgres is ready!"
|
||||
|
||||
- name: Run Django Migrations
|
||||
if: inputs.run-migrations == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
source .venv/bin/activate
|
||||
python manage.py migrate --noinput
|
||||
53
.gitea/docker/Dockerfile.ci
Normal file
53
.gitea/docker/Dockerfile.ci
Normal file
@ -0,0 +1,53 @@
|
||||
# Custom CI Docker image for Gitea runners
|
||||
# Pre-installs Node.js 24, pnpm 10, uv, and system dependencies
|
||||
# to eliminate setup time in CI workflows
|
||||
|
||||
FROM ubuntu:24.04
|
||||
|
||||
# Prevent interactive prompts during package installation
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && \
|
||||
apt-get install -y \
|
||||
postgresql-client \
|
||||
redis-tools \
|
||||
openjdk-11-jre-headless \
|
||||
curl \
|
||||
ca-certificates \
|
||||
gnupg \
|
||||
lsb-release \
|
||||
git \
|
||||
ssh \
|
||||
libxrender1 \
|
||||
libxext6 \
|
||||
libfontconfig1 \
|
||||
libfreetype6 \
|
||||
libcairo2 \
|
||||
libglib2.0-0t64 \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install Node.js 24 via NodeSource
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_24.x | bash - && \
|
||||
apt-get install -y nodejs && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Enable corepack and install pnpm 10
|
||||
RUN corepack enable && \
|
||||
corepack prepare pnpm@10 --activate
|
||||
|
||||
# Install uv https://docs.astral.sh/uv/guides/integration/docker/#available-images
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||
ENV PATH="/root/.cargo/bin:${PATH}"
|
||||
|
||||
# Verify installations
|
||||
RUN node --version && \
|
||||
npm --version && \
|
||||
pnpm --version && \
|
||||
uv --version && \
|
||||
pg_isready --version && \
|
||||
redis-cli --version && \
|
||||
java -version
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /workspace
|
||||
86
.gitea/workflows/api-ci.yaml
Normal file
86
.gitea/workflows/api-ci.yaml
Normal file
@ -0,0 +1,86 @@
|
||||
name: API CI
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- 'epapi/**'
|
||||
- 'epdb/models.py' # API depends on models
|
||||
- 'epdb/logic.py' # API depends on business logic
|
||||
- 'tests/fixtures/**' # API tests use fixtures
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
api-tests:
|
||||
if: ${{ !contains(gitea.event.pull_request.title, 'WIP') }}
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: git.envipath.com/envipath/envipy-ci:latest
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16
|
||||
env:
|
||||
POSTGRES_USER: ${{ vars.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
|
||||
POSTGRES_DB: ${{ vars.POSTGRES_DB }}
|
||||
ports:
|
||||
- ${{ vars.POSTGRES_PORT}}:5432
|
||||
options: >-
|
||||
--health-cmd="pg_isready -U postgres"
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=5
|
||||
|
||||
env:
|
||||
RUNNER_TOOL_CACHE: /toolcache
|
||||
EP_DATA_DIR: /opt/enviPy/
|
||||
ALLOWED_HOSTS: 127.0.0.1,localhost
|
||||
DEBUG: True
|
||||
LOG_LEVEL: INFO
|
||||
MODEL_BUILDING_ENABLED: True
|
||||
APPLICABILITY_DOMAIN_ENABLED: True
|
||||
ENVIFORMER_PRESENT: True
|
||||
ENVIFORMER_DEVICE: cpu
|
||||
FLAG_CELERY_PRESENT: False
|
||||
PLUGINS_ENABLED: True
|
||||
SERVER_URL: http://localhost:8000
|
||||
ADMIN_APPROVAL_REQUIRED: True
|
||||
REGISTRATION_MANDATORY: True
|
||||
LOG_DIR: ''
|
||||
# DB
|
||||
POSTGRES_SERVICE_NAME: postgres
|
||||
POSTGRES_DB: ${{ vars.POSTGRES_DB }}
|
||||
POSTGRES_USER: ${{ vars.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
|
||||
POSTGRES_PORT: 5432
|
||||
# SENTRY
|
||||
SENTRY_ENABLED: False
|
||||
# MS ENTRA
|
||||
MS_ENTRA_ENABLED: False
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Use shared setup action - skips frontend builds for API-only tests
|
||||
- name: Setup enviPy Environment
|
||||
uses: ./.gitea/actions/setup-envipy
|
||||
with:
|
||||
skip-frontend: 'true'
|
||||
skip-playwright: 'false'
|
||||
ssh-private-key: ${{ secrets.ENVIPY_CI_PRIVATE_KEY }}
|
||||
run-migrations: 'true'
|
||||
|
||||
- name: Run API tests
|
||||
run: |
|
||||
.venv/bin/python manage.py test epapi -v 2
|
||||
|
||||
- name: Test API endpoints availability
|
||||
run: |
|
||||
.venv/bin/python manage.py runserver 0.0.0.0:8000 &
|
||||
SERVER_PID=$!
|
||||
sleep 5
|
||||
curl -f http://localhost:8000/api/v1/docs || echo "API docs not available"
|
||||
kill $SERVER_PID
|
||||
48
.gitea/workflows/build-ci-image.yaml
Normal file
48
.gitea/workflows/build-ci-image.yaml
Normal file
@ -0,0 +1,48 @@
|
||||
name: Build CI Docker Image
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- main
|
||||
paths:
|
||||
- '.gitea/docker/Dockerfile.ci'
|
||||
- '.gitea/workflows/build-ci-image.yaml'
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to container registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: git.envipath.com
|
||||
username: ${{ secrets.CI_REGISTRY_USER }}
|
||||
password: ${{ secrets.CI_REGISTRY_PASSWORD }}
|
||||
|
||||
- name: Extract metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: git.envipath.com/envipath/envipy-ci
|
||||
tags: |
|
||||
type=raw,value=latest
|
||||
type=sha,prefix={{branch}}-
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: .gitea/docker/Dockerfile.ci
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=registry,ref=git.envipath.com/envipath/envipy-ci:latest
|
||||
cache-to: type=inline
|
||||
87
.gitea/workflows/ci.yaml
Normal file
87
.gitea/workflows/ci.yaml
Normal file
@ -0,0 +1,87 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
test:
|
||||
if: ${{ !contains(gitea.event.pull_request.title, 'WIP') }}
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: git.envipath.com/envipath/envipy-ci:latest
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16
|
||||
env:
|
||||
POSTGRES_USER: ${{ vars.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
|
||||
POSTGRES_DB: ${{ vars.POSTGRES_DB }}
|
||||
ports:
|
||||
- ${{ vars.POSTGRES_PORT}}:5432
|
||||
options: >-
|
||||
--health-cmd="pg_isready -U postgres"
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=5
|
||||
|
||||
#redis:
|
||||
# image: redis:7
|
||||
# ports:
|
||||
# - 6379:6379
|
||||
# options: >-
|
||||
# --health-cmd "redis-cli ping"
|
||||
# --health-interval=10s
|
||||
# --health-timeout=5s
|
||||
# --health-retries=5
|
||||
|
||||
env:
|
||||
RUNNER_TOOL_CACHE: /toolcache
|
||||
EP_DATA_DIR: /opt/enviPy/
|
||||
ALLOWED_HOSTS: 127.0.0.1,localhost
|
||||
DEBUG: True
|
||||
LOG_LEVEL: INFO
|
||||
MODEL_BUILDING_ENABLED: True
|
||||
APPLICABILITY_DOMAIN_ENABLED: True
|
||||
ENVIFORMER_PRESENT: True
|
||||
ENVIFORMER_DEVICE: cpu
|
||||
FLAG_CELERY_PRESENT: False
|
||||
PLUGINS_ENABLED: True
|
||||
SERVER_URL: http://localhost:8000
|
||||
ADMIN_APPROVAL_REQUIRED: True
|
||||
REGISTRATION_MANDATORY: True
|
||||
LOG_DIR: ''
|
||||
# DB
|
||||
POSTGRES_SERVICE_NAME: postgres
|
||||
POSTGRES_DB: ${{ vars.POSTGRES_DB }}
|
||||
POSTGRES_USER: ${{ vars.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
|
||||
POSTGRES_PORT: 5432
|
||||
# SENTRY
|
||||
SENTRY_ENABLED: False
|
||||
# MS ENTRA
|
||||
MS_ENTRA_ENABLED: False
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Use shared setup action - includes all dependencies and migrations
|
||||
- name: Setup enviPy Environment
|
||||
uses: ./.gitea/actions/setup-envipy
|
||||
with:
|
||||
skip-frontend: 'false'
|
||||
skip-playwright: 'false'
|
||||
ssh-private-key: ${{ secrets.ENVIPY_CI_PRIVATE_KEY }}
|
||||
run-migrations: 'true'
|
||||
|
||||
- name: Run frontend tests
|
||||
run: |
|
||||
.venv/bin/python manage.py test --tag frontend
|
||||
|
||||
- name: Run Django tests
|
||||
run: |
|
||||
.venv/bin/python manage.py test tests --exclude-tag slow --exclude-tag frontend
|
||||
369
.gitignore
vendored
369
.gitignore
vendored
@ -1,12 +1,375 @@
|
||||
*.pyc
|
||||
|
||||
|
||||
|
||||
### Python ###
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[codz]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
.idea/
|
||||
db.sqlite3-journal
|
||||
static/admin/
|
||||
static/django_extensions/
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
|
||||
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
|
||||
# pdm.lock
|
||||
# pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# pixi
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
|
||||
# pixi.lock
|
||||
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
|
||||
# in the .venv directory. It is recommended not to include this directory in version control.
|
||||
.pixi
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# Redis
|
||||
*.rdb
|
||||
*.aof
|
||||
*.pid
|
||||
|
||||
# RabbitMQ
|
||||
mnesia/
|
||||
rabbitmq/
|
||||
rabbitmq-data/
|
||||
|
||||
# ActiveMQ
|
||||
activemq-data/
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.envrc
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
.idea/
|
||||
|
||||
# Abstra
|
||||
# Abstra is an AI-powered process automation framework.
|
||||
# Ignore directories containing user credentials, local state, and settings.
|
||||
# Learn more at https://abstra.io/docs
|
||||
.abstra/
|
||||
|
||||
# Visual Studio Code
|
||||
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
|
||||
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. However, if you prefer,
|
||||
# you could uncomment the following to ignore the entire vscode folder
|
||||
.vscode/
|
||||
*.code-workspace
|
||||
|
||||
# Ruff stuff:
|
||||
.ruff_cache/
|
||||
|
||||
# UV cache
|
||||
.uv-cache/
|
||||
|
||||
# PyPI configuration file
|
||||
.pypirc
|
||||
|
||||
# Marimo
|
||||
marimo/_static/
|
||||
marimo/_lsp/
|
||||
__marimo__/
|
||||
|
||||
# Streamlit
|
||||
.streamlit/secrets.toml
|
||||
|
||||
### Agents ###
|
||||
.claude/
|
||||
.codex/
|
||||
.cursor/
|
||||
.github/prompts/
|
||||
.junie/
|
||||
.windsurf/
|
||||
|
||||
AGENTS.md
|
||||
CLAUDE.md
|
||||
GEMINI.md
|
||||
.aider.*
|
||||
|
||||
### Node.js ###
|
||||
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
.stylelintcache
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
.nuxt
|
||||
dist
|
||||
.output
|
||||
|
||||
# Gatsby files
|
||||
.cache/
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
.temp
|
||||
.cache
|
||||
|
||||
# Sveltekit cache directory
|
||||
.svelte-kit/
|
||||
|
||||
# vitepress build output
|
||||
**/.vitepress/dist
|
||||
|
||||
# vitepress cache directory
|
||||
**/.vitepress/cache
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# Firebase cache directory
|
||||
.firebase/
|
||||
|
||||
# TernJS port file
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
.vscode-test
|
||||
|
||||
# yarn v3
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/plugins
|
||||
!.yarn/releases
|
||||
!.yarn/sdks
|
||||
!.yarn/versions
|
||||
|
||||
# Vite files
|
||||
vite.config.js.timestamp-*
|
||||
vite.config.ts.timestamp-*
|
||||
.vite/
|
||||
|
||||
### Custom ###
|
||||
|
||||
debug.log
|
||||
scratches/
|
||||
|
||||
test-results/
|
||||
data/
|
||||
*.arff
|
||||
|
||||
# Auto generated
|
||||
static/css/output.css
|
||||
|
||||
# macOS system files
|
||||
.DS_Store
|
||||
.Trashes
|
||||
._*
|
||||
|
||||
@ -8,6 +8,7 @@ repos:
|
||||
- id: end-of-file-fixer
|
||||
- id: check-yaml
|
||||
- id: check-added-large-files
|
||||
exclude: ^static/images/
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.13.3
|
||||
@ -20,6 +21,15 @@ repos:
|
||||
- id: ruff-format
|
||||
types_or: [python, pyi]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: prettier-jinja-templates
|
||||
name: Format Jinja templates with Prettier
|
||||
entry: pnpm exec prettier --plugin=prettier-plugin-jinja-template --parser=jinja-template --write
|
||||
language: system
|
||||
types: [file]
|
||||
files: ^templates/.*\.html$
|
||||
|
||||
# - repo: local
|
||||
# hooks:
|
||||
# - id: django-check
|
||||
|
||||
11
.prettierrc.json
Normal file
11
.prettierrc.json
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"plugins": ["prettier-plugin-jinja-template", "prettier-plugin-tailwindcss"],
|
||||
"overrides": [
|
||||
{
|
||||
"files": "templates/**/*.html",
|
||||
"options": {
|
||||
"parser": "jinja-template"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
54
README.md
54
README.md
@ -7,11 +7,12 @@ These instructions will guide you through setting up the project for local devel
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.11 or later
|
||||
- [uv](https://github.com/astral-sh/uv) - A fast Python package installer and resolver.
|
||||
- **Docker and Docker Compose** - Required for running the PostgreSQL database.
|
||||
- [uv](https://github.com/astral-sh/uv) - Python package manager
|
||||
- **Docker and Docker Compose** - Required for running PostgreSQL database and Redis (for async Celery tasks)
|
||||
- Git
|
||||
- Make
|
||||
|
||||
> **Note:** This application requires PostgreSQL, which uses `ArrayField`. Docker is the recommended way to run PostgreSQL locally.
|
||||
> **Note:** This application requires PostgreSQL (uses `ArrayField`). Docker is the easiest way to run PostgreSQL locally.
|
||||
|
||||
### 1. Install Dependencies
|
||||
|
||||
@ -23,7 +24,12 @@ Then, sync the project dependencies. This will create a virtual environment in `
|
||||
uv sync --dev
|
||||
```
|
||||
|
||||
> **Note on RDkit:** If you have a different version of rdkit installed globally, the dependency installation may fail. If this happens, please uninstall the global version and run `uv sync` again.
|
||||
Note on RDkit installation: if you have rdkit installed on your system globally with a different version of python, the installation will try to link against that and subsequent calls fail. Only option remove global rdkit and rerun sync.
|
||||
|
||||
---
|
||||
|
||||
The frontend requires `pnpm` to correctly display in development.
|
||||
[Install it here](https://pnpm.io/installation).
|
||||
|
||||
### 2. Set Up Environment File
|
||||
|
||||
@ -44,6 +50,7 @@ uv run poe setup
|
||||
```
|
||||
|
||||
This single command will:
|
||||
|
||||
1. Start the PostgreSQL database using Docker Compose.
|
||||
2. Run database migrations.
|
||||
3. Bootstrap initial data (anonymous user, default packages, models).
|
||||
@ -54,9 +61,12 @@ After setup, start the development server:
|
||||
uv run poe dev
|
||||
```
|
||||
|
||||
This will start the css-watcher as well as the django-development server,
|
||||
The application will be available at `http://localhost:8000`.
|
||||
|
||||
#### Other useful Poe commands:
|
||||
**Note:** The development server automatically starts a CSS watcher (`pnpm run dev`) alongside the Django server to rebuild CSS files when changes are detected. This ensures your styles are always up-to-date during development.
|
||||
|
||||
#### Other useful Poe commands
|
||||
|
||||
You can list all available commands by running `uv run poe --help`.
|
||||
|
||||
@ -66,22 +76,46 @@ uv run poe db-down # Stop PostgreSQL
|
||||
uv run poe migrate # Run migrations only
|
||||
uv run poe bootstrap # Bootstrap data only
|
||||
uv run poe shell # Open the Django shell
|
||||
uv run poe build # Build frontend assets and collect static files
|
||||
uv run poe clean # Remove database volumes (WARNING: destroys all data)
|
||||
uv run poe celery # Start Celery worker for async task processing
|
||||
uv run poe celery-dev # Start database and Celery worker
|
||||
```
|
||||
|
||||
### 4. Async Celery Setup (Optional)
|
||||
|
||||
By default, Celery tasks run synchronously (`CELERY_TASK_ALWAYS_EAGER = True`), which means prediction tasks block the HTTP request until completion. To enable asynchronous task processing with live status updates on pathway pages:
|
||||
|
||||
1. **Set the Celery flag in your `.env` file:**
|
||||
|
||||
```bash
|
||||
FLAG_CELERY_PRESENT=True
|
||||
```
|
||||
|
||||
2. **Start Redis and Celery worker:**
|
||||
|
||||
```bash
|
||||
uv run poe celery-dev
|
||||
```
|
||||
|
||||
3. **Start the development server** (in another terminal):
|
||||
```bash
|
||||
uv run poe dev
|
||||
```
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
* **Docker Connection Error:** If you see an error like `open //./pipe/dockerDesktopLinuxEngine: The system cannot find the file specified` (on Windows), it likely means your Docker Desktop application is not running. Please start Docker Desktop and try the command again.
|
||||
|
||||
* **SSH Keys for Git Dependencies:** Some dependencies are installed from private git repositories and require SSH authentication. Ensure your SSH keys are configured correctly for Git.
|
||||
* For a general guide, see [GitHub's official documentation](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent).
|
||||
* **Windows Users:** If `uv sync` hangs while fetching git dependencies, you may need to explicitly configure Git to use the Windows OpenSSH client and use the `ssh-agent` to manage your key's passphrase.
|
||||
- **Docker Connection Error:** If you see an error like `open //./pipe/dockerDesktopLinuxEngine: The system cannot find the file specified` (on Windows), it likely means your Docker Desktop application is not running. Please start Docker Desktop and try the command again.
|
||||
|
||||
- **SSH Keys for Git Dependencies:** Some dependencies are installed from private git repositories and require SSH authentication. Ensure your SSH keys are configured correctly for Git.
|
||||
- For a general guide, see [GitHub's official documentation](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent).
|
||||
- **Windows Users:** If `uv sync` hangs while fetching git dependencies, you may need to explicitly configure Git to use the Windows OpenSSH client and use the `ssh-agent` to manage your key's passphrase.
|
||||
1. **Point Git to the correct SSH executable:**
|
||||
```powershell
|
||||
git config --global core.sshCommand "C:/Windows/System32/OpenSSH/ssh.exe"
|
||||
```
|
||||
2. **Enable and use the SSH agent:**
|
||||
|
||||
```powershell
|
||||
# Run these commands in an administrator PowerShell
|
||||
Get-Service ssh-agent | Set-Service -StartupType Automatic -PassThru | Start-Service
|
||||
|
||||
0
bayer/__init__.py
Normal file
0
bayer/__init__.py
Normal file
3
bayer/admin.py
Normal file
3
bayer/admin.py
Normal file
@ -0,0 +1,3 @@
|
||||
from django.contrib import admin
|
||||
|
||||
# Register your models here.
|
||||
6
bayer/apps.py
Normal file
6
bayer/apps.py
Normal file
@ -0,0 +1,6 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class BayerConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'bayer'
|
||||
35
bayer/migrations/0001_initial.py
Normal file
35
bayer/migrations/0001_initial.py
Normal file
@ -0,0 +1,35 @@
|
||||
# Generated by Django 5.2.7 on 2026-02-12 12:36
|
||||
|
||||
import django.utils.timezone
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Package',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('reviewed', models.BooleanField(default=False, verbose_name='Reviewstatus')),
|
||||
('classification_level', models.IntegerField(choices=[(0, 'Internal'), (10, 'Restricted'), (20, 'Secret')], default=10)),
|
||||
],
|
||||
options={
|
||||
'db_table': 'epdb_package',
|
||||
},
|
||||
),
|
||||
]
|
||||
22
bayer/migrations/0002_initial.py
Normal file
22
bayer/migrations/0002_initial.py
Normal file
@ -0,0 +1,22 @@
|
||||
# Generated by Django 5.2.7 on 2026-02-12 12:36
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('bayer', '0001_initial'),
|
||||
('epdb', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='package',
|
||||
name='license',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.license', verbose_name='License'),
|
||||
),
|
||||
]
|
||||
0
bayer/migrations/__init__.py
Normal file
0
bayer/migrations/__init__.py
Normal file
95
bayer/models.py
Normal file
95
bayer/models.py
Normal file
@ -0,0 +1,95 @@
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings as s
|
||||
from django.db import models
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from epdb.models import (
|
||||
EnviPathModel,
|
||||
ParallelRule,
|
||||
SequentialRule,
|
||||
SimpleAmbitRule,
|
||||
SimpleRDKitRule,
|
||||
)
|
||||
|
||||
|
||||
class Package(EnviPathModel):
|
||||
reviewed = models.BooleanField(verbose_name="Reviewstatus", default=False)
|
||||
license = models.ForeignKey(
|
||||
"epdb.License", on_delete=models.SET_NULL, blank=True, null=True, verbose_name="License"
|
||||
)
|
||||
|
||||
class Classification(models.IntegerChoices):
|
||||
INTERNAL = 0, "Internal"
|
||||
RESTRICTED = 10 , "Restricted"
|
||||
SECRET = 20, "Secret"
|
||||
|
||||
classification_level = models.IntegerField(
|
||||
choices=Classification,
|
||||
default=Classification.RESTRICTED,
|
||||
)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
# explicitly handle related Rules
|
||||
for r in self.rules.all():
|
||||
r.delete()
|
||||
super().delete(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.name} (pk={self.pk})"
|
||||
|
||||
@property
|
||||
def compounds(self) -> QuerySet:
|
||||
return self.compound_set.all()
|
||||
|
||||
@property
|
||||
def rules(self) -> QuerySet:
|
||||
return self.rule_set.all()
|
||||
|
||||
@property
|
||||
def reactions(self) -> QuerySet:
|
||||
return self.reaction_set.all()
|
||||
|
||||
@property
|
||||
def pathways(self) -> QuerySet:
|
||||
return self.pathway_set.all()
|
||||
|
||||
@property
|
||||
def scenarios(self) -> QuerySet:
|
||||
return self.scenario_set.all()
|
||||
|
||||
@property
|
||||
def models(self) -> QuerySet:
|
||||
return self.epmodel_set.all()
|
||||
|
||||
def _url(self):
|
||||
return "{}/package/{}".format(s.SERVER_URL, self.uuid)
|
||||
|
||||
def get_applicable_rules(self) -> List["Rule"]:
|
||||
"""
|
||||
Returns a ordered set of rules where the following applies:
|
||||
1. All Composite will be added to result
|
||||
2. All SimpleRules will be added if theres no CompositeRule present using the SimpleRule
|
||||
Ordering is based on "url" field.
|
||||
"""
|
||||
rules = []
|
||||
rule_qs = self.rules
|
||||
|
||||
reflected_simple_rules = set()
|
||||
|
||||
for r in rule_qs:
|
||||
if isinstance(r, ParallelRule) or isinstance(r, SequentialRule):
|
||||
rules.append(r)
|
||||
for sr in r.simple_rules.all():
|
||||
reflected_simple_rules.add(sr)
|
||||
|
||||
for r in rule_qs:
|
||||
if isinstance(r, SimpleAmbitRule) or isinstance(r, SimpleRDKitRule):
|
||||
if r not in reflected_simple_rules:
|
||||
rules.append(r)
|
||||
|
||||
rules = sorted(rules, key=lambda x: x.url)
|
||||
return rules
|
||||
|
||||
class Meta:
|
||||
db_table = "epdb_package"
|
||||
97
bayer/templates/objects/package.html
Normal file
97
bayer/templates/objects/package.html
Normal file
@ -0,0 +1,97 @@
|
||||
{% extends "framework_modern.html" %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
{% block action_modals %}
|
||||
{% include "modals/objects/edit_package_modal.html" %}
|
||||
{% include "modals/objects/edit_package_permissions_modal.html" %}
|
||||
{% include "modals/objects/publish_package_modal.html" %}
|
||||
{% include "modals/objects/set_license_modal.html" %}
|
||||
{% include "modals/objects/export_package_modal.html" %}
|
||||
{% include "modals/objects/generic_delete_modal.html" %}
|
||||
{% endblock action_modals %}
|
||||
|
||||
<div class="space-y-2 p-4">
|
||||
<!-- Header Section -->
|
||||
<div class="card bg-base-100">
|
||||
<div class="card-body">
|
||||
<div class="flex items-center justify-between">
|
||||
<h2 class="card-title text-2xl">{{ package.name }} - <i>{{ package.get_classification_level_display }}</i></h2>
|
||||
<div id="actionsButton" class="dropdown dropdown-e nd hidden">
|
||||
<div tabindex="0" role="button" class="btn btn-ghost btn-sm">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
stroke-width="2"
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
class="lucide lucide-wrench"
|
||||
>
|
||||
<path
|
||||
d="M14.7 6.3a1 1 0 0 0 0 1.4l1.6 1.6a1 1 0 0 0 1.4 0l3.77-3.77a6 6 0 0 1-7.94 7.94l-6.91 6.91a2.12 2.12 0 0 1-3-3l6.91-6.91a6 6 0 0 1 7.94-7.94l-3.76 3.76z"
|
||||
/>
|
||||
</svg>
|
||||
Actions
|
||||
</div>
|
||||
<ul
|
||||
tabindex="-1"
|
||||
class="dropdown-content menu bg-base-100 rounded-box z-50 w-52 p-2"
|
||||
>
|
||||
{% block actions %}
|
||||
{% include "actions/objects/package.html" %}
|
||||
{% endblock %}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<p class="mt-2">{{ package.description|safe }}</p>
|
||||
<ul class="menu bg-base-200 rounded-box mt-4 w-full">
|
||||
<li>
|
||||
<a href="{{ package.url }}/pathway" class="hover:bg-base-300"
|
||||
>Pathways ({{ package.pathways.count }})</a
|
||||
>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ package.url }}/rule" class="hover:bg-base-300"
|
||||
>Rules ({{ package.rules.count }})</a
|
||||
>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ package.url }}/compound" class="hover:bg-base-300"
|
||||
>Compounds ({{ package.compounds.count }})</a
|
||||
>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ package.url }}/reaction" class="hover:bg-base-300"
|
||||
>Reactions ({{ package.reactions.count }})</a
|
||||
>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ package.url }}/model" class="hover:bg-base-300"
|
||||
>Models ({{ package.models.count }})</a
|
||||
>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ package.url }}/scenario" class="hover:bg-base-300"
|
||||
>Scenarios ({{ package.scenarios.count }})</a
|
||||
>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Show actions button if there are actions
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
const actionsButton = document.getElementById("actionsButton");
|
||||
const actionsList = actionsButton?.querySelector("ul");
|
||||
if (actionsList && actionsList.children.length > 0) {
|
||||
actionsButton?.classList.remove("hidden");
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock content %}
|
||||
3
bayer/tests.py
Normal file
3
bayer/tests.py
Normal file
@ -0,0 +1,3 @@
|
||||
from django.test import TestCase
|
||||
|
||||
# Create your tests here.
|
||||
3
bayer/views.py
Normal file
3
bayer/views.py
Normal file
@ -0,0 +1,3 @@
|
||||
from django.shortcuts import render
|
||||
|
||||
# Create your views here.
|
||||
@ -1,6 +1,6 @@
|
||||
services:
|
||||
db:
|
||||
image: postgres:15
|
||||
image: postgres:18
|
||||
container_name: envipath-postgres
|
||||
environment:
|
||||
POSTGRES_USER: postgres
|
||||
@ -9,12 +9,18 @@ services:
|
||||
ports:
|
||||
- "5432:5432"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
- postgres_data:/var/lib/postgresql
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U postgres"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
container_name: envipath-redis
|
||||
ports:
|
||||
- "6379:6379"
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
from epdb.api import router as epdb_app_router
|
||||
from epapi.v1.router import router as v1_router # Refactored API from epdb.api_v2
|
||||
from epdb.legacy_api import router as epdb_legacy_app_router
|
||||
from ninja import NinjaAPI
|
||||
|
||||
@ -8,5 +8,5 @@ api_v1 = NinjaAPI(title="API V1 Docs", urls_namespace="api-v1")
|
||||
api_legacy = NinjaAPI(title="Legacy API Docs", urls_namespace="api-legacy")
|
||||
|
||||
# Add routers
|
||||
api_v1.add_router("/", epdb_app_router)
|
||||
api_v1.add_router("/", v1_router)
|
||||
api_legacy.add_router("/", epdb_legacy_app_router)
|
||||
|
||||
@ -21,7 +21,9 @@ from sklearn.tree import DecisionTreeClassifier
|
||||
# Build paths inside the project like this: BASE_DIR / 'subdir'.
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
load_dotenv(BASE_DIR / ".env", override=False)
|
||||
ENV_PATH = os.environ.get("ENV_PATH", BASE_DIR / ".env")
|
||||
print(f"Loading env from {ENV_PATH}")
|
||||
load_dotenv(ENV_PATH, override=False)
|
||||
|
||||
# Quick-start development settings - unsuitable for production
|
||||
# See https://docs.djangoproject.com/en/4.2/howto/deployment/checklist/
|
||||
@ -48,10 +50,26 @@ INSTALLED_APPS = [
|
||||
"django_extensions",
|
||||
"oauth2_provider",
|
||||
# Custom
|
||||
"epapi", # API endpoints (v1, etc.)
|
||||
"epdb",
|
||||
"migration",
|
||||
"bayer",
|
||||
]
|
||||
|
||||
TENANT = os.environ.get("TENANT", "public")
|
||||
|
||||
if TENANT != "public":
|
||||
INSTALLED_APPS.append(TENANT)
|
||||
|
||||
EPDB_PACKAGE_MODEL = os.environ.get("EPDB_PACKAGE_MODEL", "epdb.Package")
|
||||
|
||||
|
||||
def GET_PACKAGE_MODEL():
|
||||
from django.apps import apps
|
||||
|
||||
return apps.get_model(EPDB_PACKAGE_MODEL)
|
||||
|
||||
|
||||
AUTHENTICATION_BACKENDS = [
|
||||
"django.contrib.auth.backends.ModelBackend",
|
||||
]
|
||||
@ -76,10 +94,15 @@ if os.environ.get("REGISTRATION_MANDATORY", False) == "True":
|
||||
|
||||
ROOT_URLCONF = "envipath.urls"
|
||||
|
||||
TEMPLATE_DIRS = [
|
||||
os.path.join(BASE_DIR, "bayer", "templates"),
|
||||
os.path.join(BASE_DIR, "templates"),
|
||||
]
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": (os.path.join(BASE_DIR, "templates"),),
|
||||
"DIRS": TEMPLATE_DIRS,
|
||||
"APP_DIRS": True,
|
||||
"OPTIONS": {
|
||||
"context_processors": [
|
||||
@ -87,11 +110,14 @@ TEMPLATES = [
|
||||
"django.template.context_processors.request",
|
||||
"django.contrib.auth.context_processors.auth",
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
"epdb.context_processors.package_context",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
ALLOWED_HTML_TAGS = {"b", "i", "u", "br", "em", "mark", "p", "s", "strong"}
|
||||
|
||||
WSGI_APPLICATION = "envipath.wsgi.application"
|
||||
|
||||
# Database
|
||||
@ -181,6 +207,12 @@ if not os.path.exists(LOG_DIR):
|
||||
os.mkdir(LOG_DIR)
|
||||
|
||||
PLUGIN_DIR = os.path.join(EP_DATA_DIR, "plugins")
|
||||
|
||||
API_PAGINATION_DEFAULT_PAGE_SIZE = int(os.environ.get("API_PAGINATION_DEFAULT_PAGE_SIZE", 50))
|
||||
PAGINATION_MAX_PER_PAGE_SIZE = int(
|
||||
os.environ.get("API_PAGINATION_MAX_PAGE_SIZE", 100)
|
||||
) # Ninja override
|
||||
|
||||
if not os.path.exists(PLUGIN_DIR):
|
||||
os.mkdir(PLUGIN_DIR)
|
||||
|
||||
@ -243,6 +275,7 @@ LOGGING = {
|
||||
ENVIFORMER_PRESENT = os.environ.get("ENVIFORMER_PRESENT", "False") == "True"
|
||||
ENVIFORMER_DEVICE = os.environ.get("ENVIFORMER_DEVICE", "cpu")
|
||||
|
||||
|
||||
# If celery is not present set always eager to true which will cause delayed tasks to block until finished
|
||||
FLAG_CELERY_PRESENT = os.environ.get("FLAG_CELERY_PRESENT", "False") == "True"
|
||||
if not FLAG_CELERY_PRESENT:
|
||||
@ -337,12 +370,21 @@ FLAGS = {
|
||||
# -> /password_reset/done is covered as well
|
||||
LOGIN_EXEMPT_URLS = [
|
||||
"/register",
|
||||
"/api/v1/", # Let API handle its own authentication
|
||||
"/api/legacy/",
|
||||
"/o/token/",
|
||||
"/o/userinfo/",
|
||||
"/password_reset/",
|
||||
"/reset/",
|
||||
"/microsoft/",
|
||||
"/terms",
|
||||
"/privacy",
|
||||
"/cookie-policy",
|
||||
"/about",
|
||||
"/contact",
|
||||
"/careers",
|
||||
"/cite",
|
||||
"/legal",
|
||||
]
|
||||
|
||||
# MS AD/Entra
|
||||
|
||||
@ -23,12 +23,20 @@ from .api import api_v1, api_legacy
|
||||
|
||||
urlpatterns = [
|
||||
path("", include("epdb.urls")),
|
||||
path("", include("migration.urls")),
|
||||
path("admin/", admin.site.urls),
|
||||
path("api/v1/", api_v1.urls),
|
||||
path("api/legacy/", api_legacy.urls),
|
||||
path("o/", include("oauth2_provider.urls", namespace="oauth2_provider")),
|
||||
]
|
||||
|
||||
if "migration" in s.INSTALLED_APPS:
|
||||
urlpatterns.append(path("", include("migration.urls")))
|
||||
|
||||
if s.MS_ENTRA_ENABLED:
|
||||
urlpatterns.append(path("", include("epauth.urls")))
|
||||
|
||||
# Custom error handlers
|
||||
handler400 = "epdb.views.handler400"
|
||||
handler403 = "epdb.views.handler403"
|
||||
handler404 = "epdb.views.handler404"
|
||||
handler500 = "epdb.views.handler500"
|
||||
|
||||
0
epapi/__init__.py
Normal file
0
epapi/__init__.py
Normal file
6
epapi/apps.py
Normal file
6
epapi/apps.py
Normal file
@ -0,0 +1,6 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class EpapiConfig(AppConfig):
|
||||
default_auto_field = "django.db.models.BigAutoField"
|
||||
name = "epapi"
|
||||
0
epapi/migrations/__init__.py
Normal file
0
epapi/migrations/__init__.py
Normal file
1
epapi/tests/__init__.py
Normal file
1
epapi/tests/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# Tests for epapi app
|
||||
1
epapi/tests/utils/__init__.py
Normal file
1
epapi/tests/utils/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
"""Tests for epapi utility modules."""
|
||||
218
epapi/tests/utils/test_validation_errors.py
Normal file
218
epapi/tests/utils/test_validation_errors.py
Normal file
@ -0,0 +1,218 @@
|
||||
"""
|
||||
Tests for validation error utilities.
|
||||
|
||||
Tests the format_validation_error() and handle_validation_error() functions
|
||||
that transform Pydantic validation errors into user-friendly messages.
|
||||
"""
|
||||
|
||||
from django.test import TestCase, tag
|
||||
import json
|
||||
from pydantic import BaseModel, ValidationError, field_validator
|
||||
from typing import Literal
|
||||
|
||||
from ninja.errors import HttpError
|
||||
from epapi.utils.validation_errors import format_validation_error, handle_validation_error
|
||||
|
||||
|
||||
@tag("api", "utils")
|
||||
class ValidationErrorUtilityTests(TestCase):
|
||||
"""Test validation error utility functions."""
|
||||
|
||||
def test_format_missing_field_error(self):
|
||||
"""Test formatting of missing required field error."""
|
||||
|
||||
# Create a model with required field
|
||||
class TestModel(BaseModel):
|
||||
required_field: str
|
||||
|
||||
# Trigger validation error
|
||||
try:
|
||||
TestModel()
|
||||
except ValidationError as e:
|
||||
errors = e.errors()
|
||||
self.assertEqual(len(errors), 1)
|
||||
formatted = format_validation_error(errors[0])
|
||||
self.assertEqual(formatted, "This field is required")
|
||||
|
||||
def test_format_enum_error(self):
|
||||
"""Test formatting of enum validation error."""
|
||||
|
||||
class TestModel(BaseModel):
|
||||
status: Literal["active", "inactive"]
|
||||
|
||||
try:
|
||||
TestModel(status="invalid")
|
||||
except ValidationError as e:
|
||||
errors = e.errors()
|
||||
self.assertEqual(len(errors), 1)
|
||||
formatted = format_validation_error(errors[0])
|
||||
# Literal errors get formatted as "Please enter ..." with the valid options
|
||||
self.assertIn("Please enter", formatted)
|
||||
self.assertIn("active", formatted)
|
||||
self.assertIn("inactive", formatted)
|
||||
|
||||
def test_format_type_errors(self):
|
||||
"""Test formatting of type validation errors (string, int, float)."""
|
||||
test_cases = [
|
||||
# (field_type, invalid_value, expected_message)
|
||||
# Note: We don't check exact error_type as Pydantic may use different types
|
||||
# (e.g., int_type vs int_parsing) but we verify the formatted message is correct
|
||||
(str, 123, "Please enter a valid string"),
|
||||
(int, "not_a_number", "Please enter a valid int"),
|
||||
(float, "not_a_float", "Please enter a valid float"),
|
||||
]
|
||||
|
||||
for field_type, invalid_value, expected_message in test_cases:
|
||||
with self.subTest(field_type=field_type.__name__):
|
||||
|
||||
class TestModel(BaseModel):
|
||||
field: field_type
|
||||
|
||||
try:
|
||||
TestModel(field=invalid_value)
|
||||
except ValidationError as e:
|
||||
errors = e.errors()
|
||||
self.assertEqual(len(errors), 1)
|
||||
formatted = format_validation_error(errors[0])
|
||||
self.assertEqual(formatted, expected_message)
|
||||
|
||||
def test_format_value_error(self):
|
||||
"""Test formatting of value error from custom validator."""
|
||||
|
||||
class TestModel(BaseModel):
|
||||
age: int
|
||||
|
||||
@field_validator("age")
|
||||
@classmethod
|
||||
def validate_age(cls, v):
|
||||
if v < 0:
|
||||
raise ValueError("Age must be positive")
|
||||
return v
|
||||
|
||||
try:
|
||||
TestModel(age=-5)
|
||||
except ValidationError as e:
|
||||
errors = e.errors()
|
||||
self.assertEqual(len(errors), 1)
|
||||
formatted = format_validation_error(errors[0])
|
||||
self.assertEqual(formatted, "Age must be positive")
|
||||
|
||||
def test_format_unknown_error_type_fallback(self):
|
||||
"""Test that unknown error types fall back to default formatting."""
|
||||
# Mock an error with an unknown type
|
||||
mock_error = {
|
||||
"type": "unknown_custom_type",
|
||||
"msg": "Input should be a valid email address",
|
||||
"ctx": {},
|
||||
}
|
||||
|
||||
formatted = format_validation_error(mock_error)
|
||||
# Should use the else branch which does replacements on the message
|
||||
self.assertEqual(formatted, "Please enter a valid email address")
|
||||
|
||||
def test_handle_validation_error_structure(self):
|
||||
"""Test that handle_validation_error raises HttpError with correct structure."""
|
||||
|
||||
class TestModel(BaseModel):
|
||||
name: str
|
||||
count: int
|
||||
|
||||
try:
|
||||
TestModel(name=123, count="invalid")
|
||||
except ValidationError as e:
|
||||
# handle_validation_error should raise HttpError
|
||||
with self.assertRaises(HttpError) as context:
|
||||
handle_validation_error(e)
|
||||
|
||||
http_error = context.exception
|
||||
self.assertEqual(http_error.status_code, 400)
|
||||
|
||||
# Parse the JSON from the error message
|
||||
error_data = json.loads(http_error.message)
|
||||
|
||||
# Check structure
|
||||
self.assertEqual(error_data["type"], "validation_error")
|
||||
self.assertIn("field_errors", error_data)
|
||||
self.assertIn("message", error_data)
|
||||
self.assertEqual(error_data["message"], "Please correct the errors below")
|
||||
|
||||
# Check that both fields have errors
|
||||
self.assertIn("name", error_data["field_errors"])
|
||||
self.assertIn("count", error_data["field_errors"])
|
||||
|
||||
def test_handle_validation_error_no_pydantic_internals(self):
|
||||
"""Test that handle_validation_error doesn't expose Pydantic internals."""
|
||||
|
||||
class TestModel(BaseModel):
|
||||
email: str
|
||||
|
||||
try:
|
||||
TestModel(email=123)
|
||||
except ValidationError as e:
|
||||
with self.assertRaises(HttpError) as context:
|
||||
handle_validation_error(e)
|
||||
|
||||
http_error = context.exception
|
||||
error_data = json.loads(http_error.message)
|
||||
error_str = json.dumps(error_data)
|
||||
|
||||
# Ensure no Pydantic internals are exposed
|
||||
self.assertNotIn("pydantic", error_str.lower())
|
||||
self.assertNotIn("https://errors.pydantic.dev", error_str)
|
||||
self.assertNotIn("loc", error_str)
|
||||
|
||||
def test_handle_validation_error_user_friendly_messages(self):
|
||||
"""Test that all error messages are user-friendly."""
|
||||
|
||||
class TestModel(BaseModel):
|
||||
name: str
|
||||
age: int
|
||||
status: Literal["active", "inactive"]
|
||||
|
||||
try:
|
||||
TestModel(name=123, status="invalid") # Multiple errors
|
||||
except ValidationError as e:
|
||||
with self.assertRaises(HttpError) as context:
|
||||
handle_validation_error(e)
|
||||
|
||||
http_error = context.exception
|
||||
error_data = json.loads(http_error.message)
|
||||
|
||||
# All messages should be user-friendly (contain "Please" or "This field")
|
||||
for field, messages in error_data["field_errors"].items():
|
||||
for message in messages:
|
||||
# User-friendly messages start with "Please" or "This field"
|
||||
self.assertTrue(
|
||||
message.startswith("Please") or message.startswith("This field"),
|
||||
f"Message '{message}' is not user-friendly",
|
||||
)
|
||||
|
||||
def test_handle_validation_error_multiple_errors_same_field(self):
|
||||
"""Test handling multiple validation errors for the same field."""
|
||||
|
||||
class TestModel(BaseModel):
|
||||
value: int
|
||||
|
||||
@field_validator("value")
|
||||
@classmethod
|
||||
def validate_range(cls, v):
|
||||
if v < 0:
|
||||
raise ValueError("Must be non-negative")
|
||||
if v > 100:
|
||||
raise ValueError("Must be at most 100")
|
||||
return v
|
||||
|
||||
# Test with string (type error) - this will fail before the validator runs
|
||||
try:
|
||||
TestModel(value="invalid")
|
||||
except ValidationError as e:
|
||||
with self.assertRaises(HttpError) as context:
|
||||
handle_validation_error(e)
|
||||
|
||||
http_error = context.exception
|
||||
error_data = json.loads(http_error.message)
|
||||
|
||||
# Should have error for 'value' field
|
||||
self.assertIn("value", error_data["field_errors"])
|
||||
self.assertIsInstance(error_data["field_errors"]["value"], list)
|
||||
self.assertGreater(len(error_data["field_errors"]["value"]), 0)
|
||||
1
epapi/tests/v1/__init__.py
Normal file
1
epapi/tests/v1/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# Tests for epapi v1 API
|
||||
448
epapi/tests/v1/test_additional_information.py
Normal file
448
epapi/tests/v1/test_additional_information.py
Normal file
@ -0,0 +1,448 @@
|
||||
"""
|
||||
Tests for Additional Information API endpoints.
|
||||
|
||||
Tests CRUD operations on scenario additional information including the new PATCH endpoint.
|
||||
"""
|
||||
|
||||
from django.test import TestCase, tag
|
||||
import json
|
||||
from uuid import uuid4
|
||||
|
||||
from epdb.logic import PackageManager, UserManager
|
||||
from epdb.models import Scenario
|
||||
|
||||
|
||||
@tag("api", "additional_information")
|
||||
class AdditionalInformationAPITests(TestCase):
|
||||
"""Test additional information API endpoints."""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
"""Set up test data: user, package, and scenario."""
|
||||
cls.user = UserManager.create_user(
|
||||
"ai-test-user",
|
||||
"ai-test@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
cls.other_user = UserManager.create_user(
|
||||
"ai-other-user",
|
||||
"ai-other@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
cls.package = PackageManager.create_package(
|
||||
cls.user, "AI Test Package", "Test package for additional information"
|
||||
)
|
||||
# Package owned by other_user (no access for cls.user)
|
||||
cls.other_package = PackageManager.create_package(
|
||||
cls.other_user, "Other Package", "Package without access"
|
||||
)
|
||||
# Create a scenario for testing
|
||||
cls.scenario = Scenario.objects.create(
|
||||
package=cls.package,
|
||||
name="Test Scenario",
|
||||
description="Test scenario for additional information tests",
|
||||
scenario_type="biodegradation",
|
||||
scenario_date="2024-01-01",
|
||||
additional_information={}, # Initialize with empty dict
|
||||
)
|
||||
cls.other_scenario = Scenario.objects.create(
|
||||
package=cls.other_package,
|
||||
name="Other Scenario",
|
||||
description="Scenario in package without access",
|
||||
scenario_type="biodegradation",
|
||||
scenario_date="2024-01-01",
|
||||
additional_information={},
|
||||
)
|
||||
|
||||
def test_list_all_schemas(self):
|
||||
"""Test GET /api/v1/information/schema/ returns all schemas."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
response = self.client.get("/api/v1/information/schema/")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertIsInstance(data, dict)
|
||||
# Should have multiple schemas
|
||||
self.assertGreater(len(data), 0)
|
||||
# Each schema should have RJSF format
|
||||
for name, schema in data.items():
|
||||
self.assertIn("schema", schema)
|
||||
self.assertIn("uiSchema", schema)
|
||||
self.assertIn("formData", schema)
|
||||
self.assertIn("groups", schema)
|
||||
|
||||
def test_get_specific_schema(self):
|
||||
"""Test GET /api/v1/information/schema/{model_name}/ returns specific schema."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Assuming 'temperature' is a valid model
|
||||
response = self.client.get("/api/v1/information/schema/temperature/")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertIn("schema", data)
|
||||
self.assertIn("uiSchema", data)
|
||||
|
||||
def test_get_nonexistent_schema_returns_404(self):
|
||||
"""Test GET for non-existent schema returns 404."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
response = self.client.get("/api/v1/information/schema/nonexistent/")
|
||||
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_list_scenario_information_empty(self):
|
||||
"""Test GET /api/v1/scenario/{uuid}/information/ returns empty list initially."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
response = self.client.get(f"/api/v1/scenario/{self.scenario.uuid}/information/")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertIsInstance(data, list)
|
||||
self.assertEqual(len(data), 0)
|
||||
|
||||
def test_create_additional_information(self):
|
||||
"""Test POST creates additional information."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Create temperature information (assuming temperature model exists)
|
||||
payload = {"interval": {"start": 20, "end": 25}}
|
||||
response = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertEqual(data["status"], "created")
|
||||
self.assertIn("uuid", data)
|
||||
self.assertIsNotNone(data["uuid"])
|
||||
|
||||
def test_create_with_invalid_data_returns_400(self):
|
||||
"""Test POST with invalid data returns 400 with validation errors."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Invalid data (missing required fields or wrong types)
|
||||
payload = {"invalid_field": "value"}
|
||||
response = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = response.json()
|
||||
# Should have validation error details in 'detail' field
|
||||
self.assertIn("detail", data)
|
||||
|
||||
def test_validation_errors_are_user_friendly(self):
|
||||
"""Test that validation errors are user-friendly and field-specific."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Invalid data - wrong type (string instead of number in interval)
|
||||
payload = {"interval": {"start": "not_a_number", "end": 25}}
|
||||
response = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = response.json()
|
||||
|
||||
# Parse the error response - Django Ninja wraps errors in 'detail'
|
||||
error_str = data.get("detail") or data.get("error")
|
||||
self.assertIsNotNone(error_str, "Response should contain error details")
|
||||
|
||||
# Parse the JSON error string
|
||||
error_data = json.loads(error_str)
|
||||
|
||||
# Check structure
|
||||
self.assertEqual(error_data.get("type"), "validation_error")
|
||||
self.assertIn("field_errors", error_data)
|
||||
self.assertIn("message", error_data)
|
||||
|
||||
# Ensure error messages are user-friendly (no Pydantic URLs or technical jargon)
|
||||
error_str = json.dumps(error_data)
|
||||
self.assertNotIn("pydantic", error_str.lower())
|
||||
self.assertNotIn("https://errors.pydantic.dev", error_str)
|
||||
self.assertNotIn("loc", error_str) # No technical field like 'loc'
|
||||
|
||||
# Check that error message is helpful
|
||||
self.assertIn("Please", error_data["message"]) # User-friendly language
|
||||
|
||||
def test_patch_additional_information(self):
|
||||
"""Test PATCH updates existing additional information."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# First create an item
|
||||
create_payload = {"interval": {"start": 20, "end": 25}}
|
||||
create_response = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(create_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item_uuid = create_response.json()["uuid"]
|
||||
|
||||
# Then update it with PATCH
|
||||
update_payload = {"interval": {"start": 30, "end": 35}}
|
||||
patch_response = self.client.patch(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/item/{item_uuid}/",
|
||||
data=json.dumps(update_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(patch_response.status_code, 200)
|
||||
data = patch_response.json()
|
||||
self.assertEqual(data["status"], "updated")
|
||||
self.assertEqual(data["uuid"], item_uuid) # UUID preserved
|
||||
|
||||
# Verify the data was updated
|
||||
list_response = self.client.get(f"/api/v1/scenario/{self.scenario.uuid}/information/")
|
||||
items = list_response.json()
|
||||
self.assertEqual(len(items), 1)
|
||||
updated_item = items[0]
|
||||
self.assertEqual(updated_item["uuid"], item_uuid)
|
||||
self.assertEqual(updated_item["data"]["interval"]["start"], 30)
|
||||
self.assertEqual(updated_item["data"]["interval"]["end"], 35)
|
||||
|
||||
def test_patch_nonexistent_item_returns_404(self):
|
||||
"""Test PATCH on non-existent item returns 404."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
fake_uuid = str(uuid4())
|
||||
payload = {"interval": {"start": 30, "end": 35}}
|
||||
response = self.client.patch(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/item/{fake_uuid}/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_patch_with_invalid_data_returns_400(self):
|
||||
"""Test PATCH with invalid data returns 400."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# First create an item
|
||||
create_payload = {"interval": {"start": 20, "end": 25}}
|
||||
create_response = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(create_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item_uuid = create_response.json()["uuid"]
|
||||
|
||||
# Try to update with invalid data
|
||||
invalid_payload = {"invalid_field": "value"}
|
||||
patch_response = self.client.patch(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/item/{item_uuid}/",
|
||||
data=json.dumps(invalid_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(patch_response.status_code, 400)
|
||||
|
||||
def test_patch_validation_errors_are_user_friendly(self):
|
||||
"""Test that PATCH validation errors are user-friendly and field-specific."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# First create an item
|
||||
create_payload = {"interval": {"start": 20, "end": 25}}
|
||||
create_response = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(create_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item_uuid = create_response.json()["uuid"]
|
||||
|
||||
# Update with invalid data - wrong type (string instead of number in interval)
|
||||
invalid_payload = {"interval": {"start": "not_a_number", "end": 25}}
|
||||
patch_response = self.client.patch(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/item/{item_uuid}/",
|
||||
data=json.dumps(invalid_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(patch_response.status_code, 400)
|
||||
data = patch_response.json()
|
||||
|
||||
# Parse the error response - Django Ninja wraps errors in 'detail'
|
||||
error_str = data.get("detail") or data.get("error")
|
||||
self.assertIsNotNone(error_str, "Response should contain error details")
|
||||
|
||||
# Parse the JSON error string
|
||||
error_data = json.loads(error_str)
|
||||
|
||||
# Check structure
|
||||
self.assertEqual(error_data.get("type"), "validation_error")
|
||||
self.assertIn("field_errors", error_data)
|
||||
self.assertIn("message", error_data)
|
||||
|
||||
# Ensure error messages are user-friendly (no Pydantic URLs or technical jargon)
|
||||
error_str = json.dumps(error_data)
|
||||
self.assertNotIn("pydantic", error_str.lower())
|
||||
self.assertNotIn("https://errors.pydantic.dev", error_str)
|
||||
self.assertNotIn("loc", error_str) # No technical field like 'loc'
|
||||
|
||||
# Check that error message is helpful
|
||||
self.assertIn("Please", error_data["message"]) # User-friendly language
|
||||
|
||||
def test_delete_additional_information(self):
|
||||
"""Test DELETE removes additional information."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Create an item
|
||||
create_payload = {"interval": {"start": 20, "end": 25}}
|
||||
create_response = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(create_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item_uuid = create_response.json()["uuid"]
|
||||
|
||||
# Delete it
|
||||
delete_response = self.client.delete(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/item/{item_uuid}/"
|
||||
)
|
||||
|
||||
self.assertEqual(delete_response.status_code, 200)
|
||||
data = delete_response.json()
|
||||
self.assertEqual(data["status"], "deleted")
|
||||
|
||||
# Verify deletion
|
||||
list_response = self.client.get(f"/api/v1/scenario/{self.scenario.uuid}/information/")
|
||||
items = list_response.json()
|
||||
self.assertEqual(len(items), 0)
|
||||
|
||||
def test_delete_nonexistent_item_returns_404(self):
|
||||
"""Test DELETE on non-existent item returns 404."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
fake_uuid = str(uuid4())
|
||||
response = self.client.delete(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/item/{fake_uuid}/"
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_multiple_items_crud(self):
|
||||
"""Test creating, updating, and deleting multiple items."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Create first item
|
||||
item1_payload = {"interval": {"start": 20, "end": 25}}
|
||||
response1 = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(item1_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item1_uuid = response1.json()["uuid"]
|
||||
|
||||
# Create second item (different type if available, or same type)
|
||||
item2_payload = {"interval": {"start": 30, "end": 35}}
|
||||
response2 = self.client.post(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(item2_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item2_uuid = response2.json()["uuid"]
|
||||
|
||||
# Verify both exist
|
||||
list_response = self.client.get(f"/api/v1/scenario/{self.scenario.uuid}/information/")
|
||||
items = list_response.json()
|
||||
self.assertEqual(len(items), 2)
|
||||
|
||||
# Update first item
|
||||
update_payload = {"interval": {"start": 15, "end": 20}}
|
||||
self.client.patch(
|
||||
f"/api/v1/scenario/{self.scenario.uuid}/information/item/{item1_uuid}/",
|
||||
data=json.dumps(update_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
# Delete second item
|
||||
self.client.delete(f"/api/v1/scenario/{self.scenario.uuid}/information/item/{item2_uuid}/")
|
||||
|
||||
# Verify final state: one item with updated data
|
||||
list_response = self.client.get(f"/api/v1/scenario/{self.scenario.uuid}/information/")
|
||||
items = list_response.json()
|
||||
self.assertEqual(len(items), 1)
|
||||
self.assertEqual(items[0]["uuid"], item1_uuid)
|
||||
self.assertEqual(items[0]["data"]["interval"]["start"], 15)
|
||||
|
||||
def test_list_info_denied_without_permission(self):
|
||||
"""User cannot list info for scenario in package they don't have access to"""
|
||||
self.client.force_login(self.user)
|
||||
response = self.client.get(f"/api/v1/scenario/{self.other_scenario.uuid}/information/")
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_add_info_denied_without_permission(self):
|
||||
"""User cannot add info to scenario in package they don't have access to"""
|
||||
self.client.force_login(self.user)
|
||||
payload = {"interval": {"start": 25, "end": 30}}
|
||||
response = self.client.post(
|
||||
f"/api/v1/scenario/{self.other_scenario.uuid}/information/temperature/",
|
||||
json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_update_info_denied_without_permission(self):
|
||||
"""User cannot update info in scenario they don't have access to"""
|
||||
self.client.force_login(self.other_user)
|
||||
# First create an item as other_user
|
||||
create_payload = {"interval": {"start": 20, "end": 25}}
|
||||
create_response = self.client.post(
|
||||
f"/api/v1/scenario/{self.other_scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(create_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item_uuid = create_response.json()["uuid"]
|
||||
|
||||
# Try to update as user (who doesn't have access)
|
||||
self.client.force_login(self.user)
|
||||
update_payload = {"interval": {"start": 30, "end": 35}}
|
||||
response = self.client.patch(
|
||||
f"/api/v1/scenario/{self.other_scenario.uuid}/information/item/{item_uuid}/",
|
||||
data=json.dumps(update_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_delete_info_denied_without_permission(self):
|
||||
"""User cannot delete info from scenario they don't have access to"""
|
||||
self.client.force_login(self.other_user)
|
||||
# First create an item as other_user
|
||||
create_payload = {"interval": {"start": 20, "end": 25}}
|
||||
create_response = self.client.post(
|
||||
f"/api/v1/scenario/{self.other_scenario.uuid}/information/temperature/",
|
||||
data=json.dumps(create_payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
item_uuid = create_response.json()["uuid"]
|
||||
|
||||
# Try to delete as user (who doesn't have access)
|
||||
self.client.force_login(self.user)
|
||||
response = self.client.delete(
|
||||
f"/api/v1/scenario/{self.other_scenario.uuid}/information/item/{item_uuid}/"
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_nonexistent_scenario_returns_404(self):
|
||||
"""Test operations on non-existent scenario return 404."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
fake_uuid = uuid4()
|
||||
response = self.client.get(f"/api/v1/scenario/{fake_uuid}/information/")
|
||||
|
||||
self.assertEqual(response.status_code, 404)
|
||||
477
epapi/tests/v1/test_api_permissions.py
Normal file
477
epapi/tests/v1/test_api_permissions.py
Normal file
@ -0,0 +1,477 @@
|
||||
from django.test import TestCase, tag
|
||||
|
||||
from epdb.logic import GroupManager, PackageManager, UserManager
|
||||
from epdb.models import (
|
||||
Compound,
|
||||
GroupPackagePermission,
|
||||
Permission,
|
||||
UserPackagePermission,
|
||||
)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class APIPermissionTestBase(TestCase):
|
||||
"""
|
||||
Base class for API permission tests.
|
||||
|
||||
Sets up common test data:
|
||||
- user1: Owner of packages
|
||||
- user2: User with various permissions
|
||||
- user3: User with no permissions
|
||||
- reviewed_package: Public package (reviewed=True)
|
||||
- unreviewed_package_owned: Unreviewed package owned by user1
|
||||
- unreviewed_package_read: Unreviewed package with READ permission for user2
|
||||
- unreviewed_package_write: Unreviewed package with WRITE permission for user2
|
||||
- unreviewed_package_all: Unreviewed package with ALL permission for user2
|
||||
- unreviewed_package_no_access: Unreviewed package with no permissions for user2/user3
|
||||
- group_package: Unreviewed package accessible via group permission
|
||||
- test_group: Group containing user2
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Create users
|
||||
cls.user1 = UserManager.create_user(
|
||||
"permission-user1",
|
||||
"permission-user1@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
cls.user2 = UserManager.create_user(
|
||||
"permission-user2",
|
||||
"permission-user2@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
cls.user3 = UserManager.create_user(
|
||||
"permission-user3",
|
||||
"permission-user3@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
# Delete default packages to ensure clean test data
|
||||
for user in [cls.user1, cls.user2, cls.user3]:
|
||||
default_pkg = user.default_package
|
||||
user.default_package = None
|
||||
user.save()
|
||||
if default_pkg:
|
||||
default_pkg.delete()
|
||||
|
||||
# Create reviewed package (public)
|
||||
cls.reviewed_package = PackageManager.create_package(
|
||||
cls.user1, "Reviewed Package", "Public package"
|
||||
)
|
||||
cls.reviewed_package.reviewed = True
|
||||
cls.reviewed_package.save()
|
||||
|
||||
# Create unreviewed packages with various permissions
|
||||
cls.unreviewed_package_owned = PackageManager.create_package(
|
||||
cls.user1, "User1 Owned Package", "Owned by user1"
|
||||
)
|
||||
|
||||
cls.unreviewed_package_read = PackageManager.create_package(
|
||||
cls.user1, "User2 Read Package", "User2 has READ permission"
|
||||
)
|
||||
UserPackagePermission.objects.create(
|
||||
user=cls.user2, package=cls.unreviewed_package_read, permission=Permission.READ[0]
|
||||
)
|
||||
|
||||
cls.unreviewed_package_write = PackageManager.create_package(
|
||||
cls.user1, "User2 Write Package", "User2 has WRITE permission"
|
||||
)
|
||||
UserPackagePermission.objects.create(
|
||||
user=cls.user2, package=cls.unreviewed_package_write, permission=Permission.WRITE[0]
|
||||
)
|
||||
|
||||
cls.unreviewed_package_all = PackageManager.create_package(
|
||||
cls.user1, "User2 All Package", "User2 has ALL permission"
|
||||
)
|
||||
UserPackagePermission.objects.create(
|
||||
user=cls.user2, package=cls.unreviewed_package_all, permission=Permission.ALL[0]
|
||||
)
|
||||
|
||||
cls.unreviewed_package_no_access = PackageManager.create_package(
|
||||
cls.user1, "No Access Package", "No permissions for user2/user3"
|
||||
)
|
||||
|
||||
# Create group and group package
|
||||
cls.test_group = GroupManager.create_group(
|
||||
cls.user1, "Test Group", "Group for permission testing"
|
||||
)
|
||||
cls.test_group.user_member.add(cls.user2)
|
||||
cls.test_group.save()
|
||||
|
||||
cls.group_package = PackageManager.create_package(
|
||||
cls.user1, "Group Package", "Accessible via group permission"
|
||||
)
|
||||
GroupPackagePermission.objects.create(
|
||||
group=cls.test_group, package=cls.group_package, permission=Permission.READ[0]
|
||||
)
|
||||
|
||||
# Create test compounds in each package
|
||||
cls.reviewed_compound = Compound.create(
|
||||
cls.reviewed_package, "C", "Reviewed Compound", "Test compound"
|
||||
)
|
||||
cls.owned_compound = Compound.create(
|
||||
cls.unreviewed_package_owned, "CC", "Owned Compound", "Test compound"
|
||||
)
|
||||
cls.read_compound = Compound.create(
|
||||
cls.unreviewed_package_read, "CCC", "Read Compound", "Test compound"
|
||||
)
|
||||
cls.write_compound = Compound.create(
|
||||
cls.unreviewed_package_write, "CCCC", "Write Compound", "Test compound"
|
||||
)
|
||||
cls.all_compound = Compound.create(
|
||||
cls.unreviewed_package_all, "CCCCC", "All Compound", "Test compound"
|
||||
)
|
||||
cls.no_access_compound = Compound.create(
|
||||
cls.unreviewed_package_no_access, "CCCCCC", "No Access Compound", "Test compound"
|
||||
)
|
||||
cls.group_compound = Compound.create(
|
||||
cls.group_package, "CCCCCCC", "Group Compound", "Test compound"
|
||||
)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class PackageListPermissionTest(APIPermissionTestBase):
|
||||
"""
|
||||
Test permissions for /api/v1/packages/ endpoint.
|
||||
|
||||
Special case: This endpoint allows anonymous access (auth=None)
|
||||
"""
|
||||
|
||||
ENDPOINT = "/api/v1/packages/"
|
||||
|
||||
def test_anonymous_user_sees_only_reviewed_packages(self):
|
||||
"""Anonymous users should only see reviewed packages."""
|
||||
self.client.logout()
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# Should only see reviewed package
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.reviewed_package.uuid))
|
||||
self.assertEqual(payload["items"][0]["review_status"], "reviewed")
|
||||
|
||||
def test_authenticated_user_sees_all_readable_packages(self):
|
||||
"""Authenticated users see reviewed + packages they have access to."""
|
||||
self.client.force_login(self.user2)
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user2 should see:
|
||||
# - reviewed_package (public)
|
||||
# - unreviewed_package_read (READ permission)
|
||||
# - unreviewed_package_write (WRITE permission)
|
||||
# - unreviewed_package_all (ALL permission)
|
||||
# - group_package (via group membership)
|
||||
# Total: 5 packages
|
||||
self.assertEqual(payload["total_items"], 5)
|
||||
|
||||
visible_uuids = {item["uuid"] for item in payload["items"]}
|
||||
expected_uuids = {
|
||||
str(self.reviewed_package.uuid),
|
||||
str(self.unreviewed_package_read.uuid),
|
||||
str(self.unreviewed_package_write.uuid),
|
||||
str(self.unreviewed_package_all.uuid),
|
||||
str(self.group_package.uuid),
|
||||
}
|
||||
self.assertEqual(visible_uuids, expected_uuids)
|
||||
|
||||
def test_owner_sees_all_owned_packages(self):
|
||||
"""Package owner sees all packages they created."""
|
||||
self.client.force_login(self.user1)
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user1 owns all packages
|
||||
# Total: 7 packages (all packages created in setUpTestData)
|
||||
self.assertEqual(payload["total_items"], 7)
|
||||
|
||||
def test_filter_by_review_status_true(self):
|
||||
"""Filter to show only reviewed packages."""
|
||||
self.client.force_login(self.user2)
|
||||
response = self.client.get(self.ENDPOINT, {"review_status": True})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# Only reviewed_package
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertTrue(all(item["review_status"] == "reviewed" for item in payload["items"]))
|
||||
|
||||
def test_filter_by_review_status_false(self):
|
||||
"""Filter to show only unreviewed packages."""
|
||||
self.client.force_login(self.user2)
|
||||
response = self.client.get(self.ENDPOINT, {"review_status": False})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user2's accessible unreviewed packages: 4
|
||||
self.assertEqual(payload["total_items"], 4)
|
||||
self.assertTrue(all(item["review_status"] == "unreviewed" for item in payload["items"]))
|
||||
|
||||
def test_anonymous_filter_unreviewed_returns_empty(self):
|
||||
"""Anonymous users get no results when filtering for unreviewed."""
|
||||
self.client.logout()
|
||||
response = self.client.get(self.ENDPOINT, {"review_status": False})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 0)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class GlobalCompoundListPermissionTest(APIPermissionTestBase):
|
||||
"""
|
||||
Test permissions for /api/v1/compounds/ endpoint.
|
||||
|
||||
This endpoint requires authentication.
|
||||
"""
|
||||
|
||||
ENDPOINT = "/api/v1/compounds/"
|
||||
|
||||
def test_anonymous_user_cannot_access(self):
|
||||
"""Anonymous users should get 401 Unauthorized."""
|
||||
self.client.logout()
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
|
||||
self.assertEqual(response.status_code, 401)
|
||||
|
||||
def test_authenticated_user_sees_compounds_from_readable_packages(self):
|
||||
"""Authenticated users see compounds from packages they can read."""
|
||||
self.client.force_login(self.user2)
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 5)
|
||||
|
||||
visible_uuids = {item["uuid"] for item in payload["items"]}
|
||||
expected_uuids = {
|
||||
str(self.reviewed_compound.uuid),
|
||||
str(self.read_compound.uuid),
|
||||
str(self.write_compound.uuid),
|
||||
str(self.all_compound.uuid),
|
||||
str(self.group_compound.uuid),
|
||||
}
|
||||
self.assertEqual(visible_uuids, expected_uuids)
|
||||
|
||||
def test_user_without_permission_cannot_see_compound(self):
|
||||
"""User without permission to package cannot see its compounds."""
|
||||
self.client.force_login(self.user3)
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user3 should only see compounds from reviewed_package
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.reviewed_compound.uuid))
|
||||
|
||||
def test_owner_sees_all_compounds(self):
|
||||
"""Package owner sees all compounds they created."""
|
||||
self.client.force_login(self.user1)
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user1 owns all packages, so sees all compounds
|
||||
self.assertEqual(payload["total_items"], 7)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class PackageScopedCompoundListPermissionTest(APIPermissionTestBase):
|
||||
"""
|
||||
Test permissions for /api/v1/package/{uuid}/compound/ endpoint.
|
||||
|
||||
This endpoint requires authentication AND package access.
|
||||
"""
|
||||
|
||||
def test_anonymous_user_cannot_access_reviewed_package(self):
|
||||
"""Anonymous users should get 401 even for reviewed packages."""
|
||||
self.client.logout()
|
||||
endpoint = f"/api/v1/package/{self.reviewed_package.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 401)
|
||||
|
||||
def test_authenticated_user_can_access_reviewed_package(self):
|
||||
"""Authenticated users can access reviewed packages."""
|
||||
self.client.force_login(self.user3)
|
||||
endpoint = f"/api/v1/package/{self.reviewed_package.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.reviewed_compound.uuid))
|
||||
|
||||
def test_user_can_access_package_with_read_permission(self):
|
||||
"""User with READ permission can access package-scoped endpoint."""
|
||||
self.client.force_login(self.user2)
|
||||
endpoint = f"/api/v1/package/{self.unreviewed_package_read.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.read_compound.uuid))
|
||||
|
||||
def test_user_can_access_package_with_write_permission(self):
|
||||
"""User with WRITE permission can access package-scoped endpoint."""
|
||||
self.client.force_login(self.user2)
|
||||
endpoint = f"/api/v1/package/{self.unreviewed_package_write.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.write_compound.uuid))
|
||||
|
||||
def test_user_can_access_package_with_all_permission(self):
|
||||
"""User with ALL permission can access package-scoped endpoint."""
|
||||
self.client.force_login(self.user2)
|
||||
endpoint = f"/api/v1/package/{self.unreviewed_package_all.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.all_compound.uuid))
|
||||
|
||||
def test_user_cannot_access_package_without_permission(self):
|
||||
"""User without permission gets 403 Forbidden."""
|
||||
self.client.force_login(self.user2)
|
||||
endpoint = f"/api/v1/package/{self.unreviewed_package_no_access.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_nonexistent_package_returns_404(self):
|
||||
"""Request for non-existent package returns 404."""
|
||||
self.client.force_login(self.user2)
|
||||
fake_uuid = "00000000-0000-0000-0000-000000000000"
|
||||
endpoint = f"/api/v1/package/{fake_uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_owner_can_access_owned_package(self):
|
||||
"""Package owner can access their package."""
|
||||
self.client.force_login(self.user1)
|
||||
endpoint = f"/api/v1/package/{self.unreviewed_package_owned.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.owned_compound.uuid))
|
||||
|
||||
def test_group_member_can_access_group_package(self):
|
||||
"""Group member can access package via group permission."""
|
||||
self.client.force_login(self.user2)
|
||||
endpoint = f"/api/v1/package/{self.group_package.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
self.assertEqual(payload["total_items"], 1)
|
||||
self.assertEqual(payload["items"][0]["uuid"], str(self.group_compound.uuid))
|
||||
|
||||
def test_non_group_member_cannot_access_group_package(self):
|
||||
"""Non-group member cannot access package with only group permission."""
|
||||
self.client.force_login(self.user3)
|
||||
endpoint = f"/api/v1/package/{self.group_package.uuid}/compound/"
|
||||
response = self.client.get(endpoint)
|
||||
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class MultiResourcePermissionTest(APIPermissionTestBase):
|
||||
"""
|
||||
Test that permission system works consistently across all resource types.
|
||||
|
||||
Tests a sample of other endpoints to ensure permission logic is consistent.
|
||||
"""
|
||||
|
||||
def test_rules_endpoint_respects_permissions(self):
|
||||
"""Rules endpoint uses same permission logic."""
|
||||
from epdb.models import SimpleAmbitRule
|
||||
|
||||
# Create rule in no-access package
|
||||
rule = SimpleAmbitRule.create(
|
||||
self.unreviewed_package_no_access, "Test Rule", "Test", "[C:1]>>[C:1]O"
|
||||
)
|
||||
|
||||
self.client.force_login(self.user2)
|
||||
response = self.client.get("/api/v1/rules/")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user2 should not see the rule from no_access_package
|
||||
rule_uuids = [item["uuid"] for item in payload["items"]]
|
||||
self.assertNotIn(str(rule.uuid), rule_uuids)
|
||||
|
||||
def test_reactions_endpoint_respects_permissions(self):
|
||||
"""Reactions endpoint uses same permission logic."""
|
||||
from epdb.models import Reaction
|
||||
|
||||
# Create reaction in no-access package
|
||||
reaction = Reaction.create(
|
||||
self.unreviewed_package_no_access, "Test Reaction", "Test", ["C"], ["CO"]
|
||||
)
|
||||
|
||||
self.client.force_login(self.user2)
|
||||
response = self.client.get("/api/v1/reactions/")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user2 should not see the reaction from no_access_package
|
||||
reaction_uuids = [item["uuid"] for item in payload["items"]]
|
||||
self.assertNotIn(str(reaction.uuid), reaction_uuids)
|
||||
|
||||
def test_pathways_endpoint_respects_permissions(self):
|
||||
"""Pathways endpoint uses same permission logic."""
|
||||
from epdb.models import Pathway
|
||||
|
||||
# Create pathway in no-access package
|
||||
pathway = Pathway.objects.create(
|
||||
package=self.unreviewed_package_no_access, name="Test Pathway", description="Test"
|
||||
)
|
||||
|
||||
self.client.force_login(self.user2)
|
||||
response = self.client.get("/api/v1/pathways/")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
|
||||
# user2 should not see the pathway from no_access_package
|
||||
pathway_uuids = [item["uuid"] for item in payload["items"]]
|
||||
self.assertNotIn(str(pathway.uuid), pathway_uuids)
|
||||
477
epapi/tests/v1/test_contract_get_entities.py
Normal file
477
epapi/tests/v1/test_contract_get_entities.py
Normal file
@ -0,0 +1,477 @@
|
||||
from django.test import TestCase, tag
|
||||
|
||||
from epdb.logic import PackageManager, UserManager
|
||||
from epdb.models import Compound, Reaction, Pathway, EPModel, SimpleAmbitRule, Scenario
|
||||
|
||||
|
||||
class BaseTestAPIGetPaginated:
|
||||
"""
|
||||
Mixin class for API pagination tests.
|
||||
|
||||
Subclasses must inherit from both this class and TestCase, e.g.:
|
||||
class MyTest(BaseTestAPIGetPaginated, TestCase):
|
||||
...
|
||||
|
||||
Subclasses must define:
|
||||
- resource_name: Singular name (e.g., "compound")
|
||||
- resource_name_plural: Plural name (e.g., "compounds")
|
||||
- global_endpoint: Global listing endpoint (e.g., "/api/v1/compounds/")
|
||||
- package_endpoint_template: Template for package-scoped endpoint or None
|
||||
- total_reviewed: Number of reviewed items to create
|
||||
- total_unreviewed: Number of unreviewed items to create
|
||||
- create_reviewed_resource(cls, package, idx): Factory method
|
||||
- create_unreviewed_resource(cls, package, idx): Factory method
|
||||
"""
|
||||
|
||||
# Configuration to be overridden by subclasses
|
||||
resource_name = None
|
||||
resource_name_plural = None
|
||||
global_endpoint = None
|
||||
package_endpoint_template = None
|
||||
total_reviewed = 50
|
||||
total_unreviewed = 20
|
||||
default_page_size = 50
|
||||
max_page_size = 100
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Create test user
|
||||
cls.user = UserManager.create_user(
|
||||
f"{cls.resource_name}-user",
|
||||
f"{cls.resource_name}-user@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
# Delete the auto-created default package to ensure clean test data
|
||||
default_pkg = cls.user.default_package
|
||||
cls.user.default_package = None
|
||||
cls.user.save()
|
||||
default_pkg.delete()
|
||||
|
||||
# Create reviewed package
|
||||
cls.reviewed_package = PackageManager.create_package(
|
||||
cls.user, "Reviewed Package", f"Reviewed package for {cls.resource_name} tests"
|
||||
)
|
||||
cls.reviewed_package.reviewed = True
|
||||
cls.reviewed_package.save()
|
||||
|
||||
# Create unreviewed package
|
||||
cls.unreviewed_package = PackageManager.create_package(
|
||||
cls.user, "Draft Package", f"Unreviewed package for {cls.resource_name} tests"
|
||||
)
|
||||
|
||||
# Create reviewed resources
|
||||
for idx in range(cls.total_reviewed):
|
||||
cls.create_reviewed_resource(cls.reviewed_package, idx)
|
||||
|
||||
# Create unreviewed resources
|
||||
for idx in range(cls.total_unreviewed):
|
||||
cls.create_unreviewed_resource(cls.unreviewed_package, idx)
|
||||
|
||||
# Set up package-scoped endpoints if applicable
|
||||
if cls.package_endpoint_template:
|
||||
cls.reviewed_package_endpoint = cls.package_endpoint_template.format(
|
||||
uuid=cls.reviewed_package.uuid
|
||||
)
|
||||
cls.unreviewed_package_endpoint = cls.package_endpoint_template.format(
|
||||
uuid=cls.unreviewed_package.uuid
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_reviewed_resource(cls, package, idx):
|
||||
"""
|
||||
Create a single reviewed resource.
|
||||
Must be implemented by subclass.
|
||||
|
||||
Args:
|
||||
package: The package to create the resource in
|
||||
idx: Index of the resource (0-based)
|
||||
"""
|
||||
raise NotImplementedError(f"{cls.__name__} must implement create_reviewed_resource()")
|
||||
|
||||
@classmethod
|
||||
def create_unreviewed_resource(cls, package, idx):
|
||||
"""
|
||||
Create a single unreviewed resource.
|
||||
Must be implemented by subclass.
|
||||
|
||||
Args:
|
||||
package: The package to create the resource in
|
||||
idx: Index of the resource (0-based)
|
||||
"""
|
||||
raise NotImplementedError(f"{cls.__name__} must implement create_unreviewed_resource()")
|
||||
|
||||
def setUp(self):
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_requires_session_authentication(self):
|
||||
"""Test that the global endpoint requires authentication."""
|
||||
self.client.logout()
|
||||
response = self.client.get(self.global_endpoint)
|
||||
self.assertEqual(response.status_code, 401)
|
||||
|
||||
def test_global_listing_uses_default_page_size(self):
|
||||
"""Test that the global endpoint uses default pagination settings."""
|
||||
response = self.client.get(self.global_endpoint, {"review_status": True})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["page"], 1)
|
||||
self.assertEqual(payload["page_size"], self.default_page_size)
|
||||
self.assertEqual(payload["total_items"], self.total_reviewed)
|
||||
|
||||
# Verify only reviewed items are returned
|
||||
self.assertTrue(all(item["review_status"] == "reviewed" for item in payload["items"]))
|
||||
|
||||
def test_can_request_later_page(self):
|
||||
"""Test that pagination works for later pages."""
|
||||
if self.total_reviewed <= self.default_page_size:
|
||||
self.skipTest(
|
||||
f"Not enough items to test pagination "
|
||||
f"({self.total_reviewed} <= {self.default_page_size})"
|
||||
)
|
||||
|
||||
response = self.client.get(self.global_endpoint, {"page": 2, "review_status": True})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["page"], 2)
|
||||
|
||||
# Calculate expected items on page 2
|
||||
expected_items = min(self.default_page_size, self.total_reviewed - self.default_page_size)
|
||||
self.assertEqual(len(payload["items"]), expected_items)
|
||||
|
||||
# Verify only reviewed items are returned
|
||||
self.assertTrue(all(item["review_status"] == "reviewed" for item in payload["items"]))
|
||||
|
||||
def test_page_size_is_capped(self):
|
||||
"""Test that page size is capped at the maximum."""
|
||||
if self.total_reviewed <= self.max_page_size:
|
||||
self.skipTest(
|
||||
f"Not enough items to test page size cap "
|
||||
f"({self.total_reviewed} <= {self.max_page_size})"
|
||||
)
|
||||
|
||||
response = self.client.get(self.global_endpoint, {"page_size": 150})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["page_size"], self.max_page_size)
|
||||
self.assertEqual(len(payload["items"]), self.max_page_size)
|
||||
|
||||
def test_package_endpoint_for_reviewed_package(self):
|
||||
"""Test the package-scoped endpoint for reviewed packages."""
|
||||
if not self.package_endpoint_template:
|
||||
self.skipTest("No package endpoint for this resource")
|
||||
|
||||
response = self.client.get(self.reviewed_package_endpoint)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["total_items"], self.total_reviewed)
|
||||
|
||||
# Verify only reviewed items are returned
|
||||
self.assertTrue(all(item["review_status"] == "reviewed" for item in payload["items"]))
|
||||
|
||||
def test_package_endpoint_for_unreviewed_package(self):
|
||||
"""Test the package-scoped endpoint for unreviewed packages."""
|
||||
if not self.package_endpoint_template:
|
||||
self.skipTest("No package endpoint for this resource")
|
||||
|
||||
response = self.client.get(self.unreviewed_package_endpoint)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["total_items"], self.total_unreviewed)
|
||||
|
||||
# Verify only unreviewed items are returned
|
||||
self.assertTrue(all(item["review_status"] == "unreviewed" for item in payload["items"]))
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class PackagePaginationAPITest(TestCase):
|
||||
ENDPOINT = "/api/v1/packages/"
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.user = UserManager.create_user(
|
||||
"package-user",
|
||||
"package-user@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
# Delete the auto-created default package to ensure clean test data
|
||||
default_pkg = cls.user.default_package
|
||||
cls.user.default_package = None
|
||||
cls.user.save()
|
||||
default_pkg.delete()
|
||||
|
||||
# Create reviewed packages
|
||||
cls.total_reviewed = 25
|
||||
for idx in range(cls.total_reviewed):
|
||||
package = PackageManager.create_package(
|
||||
cls.user, f"Reviewed Package {idx:03d}", "Reviewed package for tests"
|
||||
)
|
||||
package.reviewed = True
|
||||
package.save()
|
||||
|
||||
# Create unreviewed packages
|
||||
cls.total_unreviewed = 15
|
||||
for idx in range(cls.total_unreviewed):
|
||||
PackageManager.create_package(
|
||||
cls.user, f"Draft Package {idx:03d}", "Unreviewed package for tests"
|
||||
)
|
||||
|
||||
def setUp(self):
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_anonymous_can_access_reviewed_packages(self):
|
||||
self.client.logout()
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
# Anonymous users can only see reviewed packages
|
||||
self.assertEqual(payload["total_items"], self.total_reviewed)
|
||||
self.assertTrue(all(item["review_status"] == "reviewed" for item in payload["items"]))
|
||||
|
||||
def test_listing_uses_default_page_size(self):
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["page"], 1)
|
||||
self.assertEqual(payload["page_size"], 50)
|
||||
self.assertEqual(payload["total_items"], self.total_reviewed + self.total_unreviewed)
|
||||
|
||||
def test_reviewed_filter_true(self):
|
||||
response = self.client.get(self.ENDPOINT, {"review_status": True})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["total_items"], self.total_reviewed)
|
||||
self.assertTrue(all(item["review_status"] == "reviewed" for item in payload["items"]))
|
||||
|
||||
def test_reviewed_filter_false(self):
|
||||
response = self.client.get(self.ENDPOINT, {"review_status": False})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
self.assertEqual(payload["total_items"], self.total_unreviewed)
|
||||
self.assertTrue(all(item["review_status"] == "unreviewed" for item in payload["items"]))
|
||||
|
||||
def test_reviewed_filter_false_anonymous(self):
|
||||
self.client.logout()
|
||||
response = self.client.get(self.ENDPOINT, {"review_status": False})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
payload = response.json()
|
||||
# Anonymous users cannot access unreviewed packages
|
||||
self.assertEqual(payload["total_items"], 0)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class CompoundPaginationAPITest(BaseTestAPIGetPaginated, TestCase):
|
||||
"""Compound pagination tests using base class."""
|
||||
|
||||
resource_name = "compound"
|
||||
resource_name_plural = "compounds"
|
||||
global_endpoint = "/api/v1/compounds/"
|
||||
package_endpoint_template = "/api/v1/package/{uuid}/compound/"
|
||||
total_reviewed = 125
|
||||
total_unreviewed = 35
|
||||
|
||||
@classmethod
|
||||
def create_reviewed_resource(cls, package, idx):
|
||||
simple_smiles = ["C", "CC", "CCC", "CCCC", "CCCCC"]
|
||||
smiles = simple_smiles[idx % len(simple_smiles)] + ("O" * (idx // len(simple_smiles)))
|
||||
return Compound.create(
|
||||
package,
|
||||
smiles,
|
||||
f"Reviewed Compound {idx:03d}",
|
||||
"Compound for pagination tests",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_unreviewed_resource(cls, package, idx):
|
||||
simple_smiles = ["C", "CC", "CCC", "CCCC", "CCCCC"]
|
||||
smiles = simple_smiles[idx % len(simple_smiles)] + ("N" * (idx // len(simple_smiles)))
|
||||
return Compound.create(
|
||||
package,
|
||||
smiles,
|
||||
f"Draft Compound {idx:03d}",
|
||||
"Compound for pagination tests",
|
||||
)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class RulePaginationAPITest(BaseTestAPIGetPaginated, TestCase):
|
||||
"""Rule pagination tests using base class."""
|
||||
|
||||
resource_name = "rule"
|
||||
resource_name_plural = "rules"
|
||||
global_endpoint = "/api/v1/rules/"
|
||||
package_endpoint_template = "/api/v1/package/{uuid}/rule/"
|
||||
total_reviewed = 125
|
||||
total_unreviewed = 35
|
||||
|
||||
@classmethod
|
||||
def create_reviewed_resource(cls, package, idx):
|
||||
# Create unique SMIRKS by combining chain length and functional group variations
|
||||
# This ensures each idx gets a truly unique SMIRKS pattern
|
||||
carbon_chain = "C" * (idx + 1) # C, CC, CCC, ... (grows with idx)
|
||||
smirks = f"[{carbon_chain}:1]>>[{carbon_chain}:1]O"
|
||||
return SimpleAmbitRule.create(
|
||||
package,
|
||||
f"Reviewed Rule {idx:03d}",
|
||||
f"Rule {idx} for pagination tests",
|
||||
smirks,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_unreviewed_resource(cls, package, idx):
|
||||
# Create unique SMIRKS by varying the carbon chain length
|
||||
carbon_chain = "C" * (idx + 1) # C, CC, CCC, ... (grows with idx)
|
||||
smirks = f"[{carbon_chain}:1]>>[{carbon_chain}:1]N"
|
||||
return SimpleAmbitRule.create(
|
||||
package,
|
||||
f"Draft Rule {idx:03d}",
|
||||
f"Rule {idx} for pagination tests",
|
||||
smirks,
|
||||
)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class ReactionPaginationAPITest(BaseTestAPIGetPaginated, TestCase):
|
||||
"""Reaction pagination tests using base class."""
|
||||
|
||||
resource_name = "reaction"
|
||||
resource_name_plural = "reactions"
|
||||
global_endpoint = "/api/v1/reactions/"
|
||||
package_endpoint_template = "/api/v1/package/{uuid}/reaction/"
|
||||
total_reviewed = 125
|
||||
total_unreviewed = 35
|
||||
|
||||
@classmethod
|
||||
def create_reviewed_resource(cls, package, idx):
|
||||
# Generate unique SMILES with growing chain lengths to avoid duplicates
|
||||
# Each idx gets a unique chain length
|
||||
educt_smiles = "C" * (idx + 1) # C, CC, CCC, ... (grows with idx)
|
||||
product_smiles = educt_smiles + "O"
|
||||
return Reaction.create(
|
||||
package=package,
|
||||
name=f"Reviewed Reaction {idx:03d}",
|
||||
description="Reaction for pagination tests",
|
||||
educts=[educt_smiles],
|
||||
products=[product_smiles],
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_unreviewed_resource(cls, package, idx):
|
||||
# Generate unique SMILES with growing chain lengths to avoid duplicates
|
||||
# Each idx gets a unique chain length
|
||||
educt_smiles = "C" * (idx + 1) # C, CC, CCC, ... (grows with idx)
|
||||
product_smiles = educt_smiles + "N"
|
||||
return Reaction.create(
|
||||
package=package,
|
||||
name=f"Draft Reaction {idx:03d}",
|
||||
description="Reaction for pagination tests",
|
||||
educts=[educt_smiles],
|
||||
products=[product_smiles],
|
||||
)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class PathwayPaginationAPITest(BaseTestAPIGetPaginated, TestCase):
|
||||
"""Pathway pagination tests using base class."""
|
||||
|
||||
resource_name = "pathway"
|
||||
resource_name_plural = "pathways"
|
||||
global_endpoint = "/api/v1/pathways/"
|
||||
package_endpoint_template = "/api/v1/package/{uuid}/pathway/"
|
||||
total_reviewed = 125
|
||||
total_unreviewed = 35
|
||||
|
||||
@classmethod
|
||||
def create_reviewed_resource(cls, package, idx):
|
||||
return Pathway.objects.create(
|
||||
package=package,
|
||||
name=f"Reviewed Pathway {idx:03d}",
|
||||
description="Pathway for pagination tests",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_unreviewed_resource(cls, package, idx):
|
||||
return Pathway.objects.create(
|
||||
package=package,
|
||||
name=f"Draft Pathway {idx:03d}",
|
||||
description="Pathway for pagination tests",
|
||||
)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class ModelPaginationAPITest(BaseTestAPIGetPaginated, TestCase):
|
||||
"""Model pagination tests using base class."""
|
||||
|
||||
resource_name = "model"
|
||||
resource_name_plural = "models"
|
||||
global_endpoint = "/api/v1/models/"
|
||||
package_endpoint_template = "/api/v1/package/{uuid}/model/"
|
||||
total_reviewed = 125
|
||||
total_unreviewed = 35
|
||||
|
||||
@classmethod
|
||||
def create_reviewed_resource(cls, package, idx):
|
||||
return EPModel.objects.create(
|
||||
package=package,
|
||||
name=f"Reviewed Model {idx:03d}",
|
||||
description="Model for pagination tests",
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_unreviewed_resource(cls, package, idx):
|
||||
return EPModel.objects.create(
|
||||
package=package,
|
||||
name=f"Draft Model {idx:03d}",
|
||||
description="Model for pagination tests",
|
||||
)
|
||||
|
||||
|
||||
@tag("api", "end2end")
|
||||
class ScenarioPaginationAPITest(BaseTestAPIGetPaginated, TestCase):
|
||||
"""Scenario pagination tests using base class."""
|
||||
|
||||
resource_name = "scenario"
|
||||
resource_name_plural = "scenarios"
|
||||
global_endpoint = "/api/v1/scenarios/"
|
||||
package_endpoint_template = "/api/v1/package/{uuid}/scenario/"
|
||||
total_reviewed = 125
|
||||
total_unreviewed = 35
|
||||
|
||||
@classmethod
|
||||
def create_reviewed_resource(cls, package, idx):
|
||||
return Scenario.create(
|
||||
package,
|
||||
f"Reviewed Scenario {idx:03d}",
|
||||
"Scenario for pagination tests",
|
||||
"2025-01-01",
|
||||
"lab",
|
||||
[],
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_unreviewed_resource(cls, package, idx):
|
||||
return Scenario.create(
|
||||
package,
|
||||
f"Draft Scenario {idx:03d}",
|
||||
"Scenario for pagination tests",
|
||||
"2025-01-01",
|
||||
"field",
|
||||
[],
|
||||
)
|
||||
301
epapi/tests/v1/test_scenario_creation.py
Normal file
301
epapi/tests/v1/test_scenario_creation.py
Normal file
@ -0,0 +1,301 @@
|
||||
"""
|
||||
Tests for Scenario Creation Endpoint Error Handling.
|
||||
|
||||
Tests comprehensive error handling for POST /api/v1/package/{uuid}/scenario/
|
||||
including package not found, permission denied, validation errors, and database errors.
|
||||
"""
|
||||
|
||||
from django.test import TestCase, tag
|
||||
import json
|
||||
from uuid import uuid4
|
||||
|
||||
from epdb.logic import PackageManager, UserManager
|
||||
from epdb.models import Scenario
|
||||
|
||||
|
||||
@tag("api", "scenario_creation")
|
||||
class ScenarioCreationAPITests(TestCase):
|
||||
"""Test scenario creation endpoint error handling."""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
"""Set up test data: users and packages."""
|
||||
cls.user = UserManager.create_user(
|
||||
"scenario-test-user",
|
||||
"scenario-test@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
cls.other_user = UserManager.create_user(
|
||||
"other-user",
|
||||
"other@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
cls.package = PackageManager.create_package(
|
||||
cls.user, "Test Package", "Test package for scenario creation"
|
||||
)
|
||||
|
||||
def test_create_scenario_package_not_found(self):
|
||||
"""Test that non-existent package UUID returns 404."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
fake_uuid = uuid4()
|
||||
payload = {
|
||||
"name": "Test Scenario",
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{fake_uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 404)
|
||||
self.assertIn("Package not found", response.json()["detail"])
|
||||
|
||||
def test_create_scenario_insufficient_permissions(self):
|
||||
"""Test that unauthorized access returns 403."""
|
||||
self.client.force_login(self.other_user)
|
||||
|
||||
payload = {
|
||||
"name": "Test Scenario",
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 403)
|
||||
self.assertIn("permission", response.json()["detail"].lower())
|
||||
|
||||
def test_create_scenario_invalid_ai_type(self):
|
||||
"""Test that unknown additional information type returns 400."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
payload = {
|
||||
"name": "Test Scenario",
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [
|
||||
{"type": "invalid_type_that_does_not_exist", "data": {"some_field": "some_value"}}
|
||||
],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 400)
|
||||
response_data = response.json()
|
||||
self.assertIn("Validation errors", response_data["detail"])
|
||||
|
||||
def test_create_scenario_validation_error(self):
|
||||
"""Test that invalid additional information data returns 400."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Use malformed data structure for an actual AI type
|
||||
payload = {
|
||||
"name": "Test Scenario",
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [
|
||||
{
|
||||
"type": "invalid_type_name",
|
||||
"data": None, # This should cause a validation error
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
# Should return 422 for validation errors
|
||||
self.assertEqual(response.status_code, 422)
|
||||
|
||||
def test_create_scenario_success(self):
|
||||
"""Test that valid scenario creation returns 200."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
payload = {
|
||||
"name": "Test Scenario",
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertEqual(data["name"], "Test Scenario")
|
||||
self.assertEqual(data["description"], "Test description")
|
||||
|
||||
# Verify scenario was actually created
|
||||
scenario = Scenario.objects.get(name="Test Scenario")
|
||||
self.assertEqual(scenario.package, self.package)
|
||||
self.assertEqual(scenario.scenario_type, "biodegradation")
|
||||
|
||||
def test_create_scenario_auto_name(self):
|
||||
"""Test that empty name triggers auto-generation."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
payload = {
|
||||
"name": "", # Empty name should be auto-generated
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
# Auto-generated name should follow pattern "Scenario N"
|
||||
self.assertTrue(data["name"].startswith("Scenario "))
|
||||
|
||||
def test_create_scenario_xss_protection(self):
|
||||
"""Test that XSS attempts are sanitized."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
payload = {
|
||||
"name": "<script>alert('xss')</script>Clean Name",
|
||||
"description": "<img src=x onerror=alert('xss')>Description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
# XSS should be cleaned out
|
||||
self.assertNotIn("<script>", data["name"])
|
||||
self.assertNotIn("onerror", data["description"])
|
||||
|
||||
def test_create_scenario_missing_required_field(self):
|
||||
"""Test that missing required fields returns validation error."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Missing 'name' field entirely
|
||||
payload = {
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
# Should return 422 for schema validation errors
|
||||
self.assertEqual(response.status_code, 422)
|
||||
|
||||
def test_create_scenario_type_error_in_ai(self):
|
||||
"""Test that TypeError in AI instantiation returns 400."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
payload = {
|
||||
"name": "Test Scenario",
|
||||
"description": "Test description",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [
|
||||
{
|
||||
"type": "invalid_type_name",
|
||||
"data": "string instead of dict", # Wrong type
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
# Should return 422 for validation errors
|
||||
self.assertEqual(response.status_code, 422)
|
||||
|
||||
def test_create_scenario_default_values(self):
|
||||
"""Test that default values are applied correctly."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
# Minimal payload with only name
|
||||
payload = {"name": "Minimal Scenario"}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertEqual(data["name"], "Minimal Scenario")
|
||||
# Check defaults are applied
|
||||
scenario = Scenario.objects.get(name="Minimal Scenario")
|
||||
# Default description from model is "no description"
|
||||
self.assertIn(scenario.description.lower(), ["", "no description"])
|
||||
|
||||
def test_create_scenario_unicode_characters(self):
|
||||
"""Test that unicode characters are handled properly."""
|
||||
self.client.force_login(self.user)
|
||||
|
||||
payload = {
|
||||
"name": "Test Scenario 测试 🧪",
|
||||
"description": "Description with émojis and spëcial çhars",
|
||||
"scenario_date": "2024-01-01",
|
||||
"scenario_type": "biodegradation",
|
||||
"additional_information": [],
|
||||
}
|
||||
|
||||
response = self.client.post(
|
||||
f"/api/v1/package/{self.package.uuid}/scenario/",
|
||||
data=json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertIn("测试", data["name"])
|
||||
self.assertIn("émojis", data["description"])
|
||||
114
epapi/tests/v1/test_schema_generation.py
Normal file
114
epapi/tests/v1/test_schema_generation.py
Normal file
@ -0,0 +1,114 @@
|
||||
"""
|
||||
Property-based tests for schema generation.
|
||||
|
||||
Tests that verify schema generation works correctly for all models,
|
||||
regardless of their structure.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from typing import Type
|
||||
from pydantic import BaseModel
|
||||
|
||||
from envipy_additional_information import registry, EnviPyModel
|
||||
from epapi.utils.schema_transformers import build_rjsf_output
|
||||
|
||||
|
||||
class TestSchemaGeneration:
|
||||
"""Test that all models can generate valid RJSF schemas."""
|
||||
|
||||
@pytest.mark.parametrize("model_name,model_cls", list(registry.list_models().items()))
|
||||
def test_all_models_generate_rjsf(self, model_name: str, model_cls: Type[BaseModel]):
|
||||
"""Every model in the registry should generate valid RJSF format."""
|
||||
# Skip non-EnviPyModel classes (parsers, etc.)
|
||||
if not issubclass(model_cls, EnviPyModel):
|
||||
pytest.skip(f"{model_name} is not an EnviPyModel")
|
||||
|
||||
# Should not raise exception
|
||||
result = build_rjsf_output(model_cls)
|
||||
|
||||
# Verify structure
|
||||
assert isinstance(result, dict), f"{model_name}: Result should be a dict"
|
||||
assert "schema" in result, f"{model_name}: Missing 'schema' key"
|
||||
assert "uiSchema" in result, f"{model_name}: Missing 'uiSchema' key"
|
||||
assert "formData" in result, f"{model_name}: Missing 'formData' key"
|
||||
assert "groups" in result, f"{model_name}: Missing 'groups' key"
|
||||
|
||||
# Verify types
|
||||
assert isinstance(result["schema"], dict), f"{model_name}: schema should be dict"
|
||||
assert isinstance(result["uiSchema"], dict), f"{model_name}: uiSchema should be dict"
|
||||
assert isinstance(result["formData"], dict), f"{model_name}: formData should be dict"
|
||||
assert isinstance(result["groups"], list), f"{model_name}: groups should be list"
|
||||
|
||||
# Verify schema has properties
|
||||
assert "properties" in result["schema"], f"{model_name}: schema should have 'properties'"
|
||||
assert isinstance(result["schema"]["properties"], dict), (
|
||||
f"{model_name}: properties should be dict"
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("model_name,model_cls", list(registry.list_models().items()))
|
||||
def test_ui_schema_matches_schema_fields(self, model_name: str, model_cls: Type[BaseModel]):
|
||||
"""uiSchema keys should match schema properties (or be nested for intervals)."""
|
||||
if not issubclass(model_cls, EnviPyModel):
|
||||
pytest.skip(f"{model_name} is not an EnviPyModel")
|
||||
|
||||
result = build_rjsf_output(model_cls)
|
||||
schema_props = set(result["schema"]["properties"].keys())
|
||||
ui_schema_keys = set(result["uiSchema"].keys())
|
||||
|
||||
# uiSchema should have entries for all top-level properties
|
||||
# (intervals may have nested start/end, but the main field should be present)
|
||||
assert ui_schema_keys.issubset(schema_props), (
|
||||
f"{model_name}: uiSchema has keys not in schema: {ui_schema_keys - schema_props}"
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("model_name,model_cls", list(registry.list_models().items()))
|
||||
def test_groups_is_list_of_strings(self, model_name: str, model_cls: Type[BaseModel]):
|
||||
"""Groups should be a list of strings."""
|
||||
if not issubclass(model_cls, EnviPyModel):
|
||||
pytest.skip(f"{model_name} is not an EnviPyModel")
|
||||
|
||||
result = build_rjsf_output(model_cls)
|
||||
groups = result["groups"]
|
||||
|
||||
assert isinstance(groups, list), f"{model_name}: groups should be list"
|
||||
assert all(isinstance(g, str) for g in groups), (
|
||||
f"{model_name}: all groups should be strings, got {groups}"
|
||||
)
|
||||
assert len(groups) > 0, f"{model_name}: should have at least one group"
|
||||
|
||||
@pytest.mark.parametrize("model_name,model_cls", list(registry.list_models().items()))
|
||||
def test_form_data_matches_schema(self, model_name: str, model_cls: Type[BaseModel]):
|
||||
"""formData keys should match schema properties."""
|
||||
if not issubclass(model_cls, EnviPyModel):
|
||||
pytest.skip(f"{model_name} is not an EnviPyModel")
|
||||
|
||||
result = build_rjsf_output(model_cls)
|
||||
schema_props = set(result["schema"]["properties"].keys())
|
||||
form_data_keys = set(result["formData"].keys())
|
||||
|
||||
# formData should only contain keys that are in schema
|
||||
assert form_data_keys.issubset(schema_props), (
|
||||
f"{model_name}: formData has keys not in schema: {form_data_keys - schema_props}"
|
||||
)
|
||||
|
||||
|
||||
class TestWidgetTypes:
|
||||
"""Test that widget types are valid."""
|
||||
|
||||
@pytest.mark.parametrize("model_name,model_cls", list(registry.list_models().items()))
|
||||
def test_widget_types_are_valid(self, model_name: str, model_cls: Type[BaseModel]):
|
||||
"""All widget types in uiSchema should be valid WidgetType values."""
|
||||
from envipy_additional_information.ui_config import WidgetType
|
||||
|
||||
if not issubclass(model_cls, EnviPyModel):
|
||||
pytest.skip(f"{model_name} is not an EnviPyModel")
|
||||
|
||||
result = build_rjsf_output(model_cls)
|
||||
valid_widgets = {wt.value for wt in WidgetType}
|
||||
|
||||
for field_name, ui_config in result["uiSchema"].items():
|
||||
widget = ui_config.get("ui:widget")
|
||||
if widget:
|
||||
assert widget in valid_widgets, (
|
||||
f"{model_name}.{field_name}: Invalid widget '{widget}'. Valid: {valid_widgets}"
|
||||
)
|
||||
94
epapi/tests/v1/test_token_auth.py
Normal file
94
epapi/tests/v1/test_token_auth.py
Normal file
@ -0,0 +1,94 @@
|
||||
from datetime import timedelta
|
||||
|
||||
from django.test import TestCase, tag
|
||||
from django.utils import timezone
|
||||
|
||||
from epdb.logic import PackageManager, UserManager
|
||||
from epdb.models import APIToken
|
||||
|
||||
|
||||
@tag("api", "auth")
|
||||
class BearerTokenAuthTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.user = UserManager.create_user(
|
||||
"token-user",
|
||||
"token-user@envipath.com",
|
||||
"SuperSafe",
|
||||
set_setting=False,
|
||||
add_to_group=False,
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
default_pkg = cls.user.default_package
|
||||
cls.user.default_package = None
|
||||
cls.user.save()
|
||||
if default_pkg:
|
||||
default_pkg.delete()
|
||||
|
||||
cls.unreviewed_package = PackageManager.create_package(
|
||||
cls.user, "Token Auth Package", "Package for token auth tests"
|
||||
)
|
||||
|
||||
def _auth_header(self, raw_token):
|
||||
return {"HTTP_AUTHORIZATION": f"Bearer {raw_token}"}
|
||||
|
||||
def test_valid_token_allows_access(self):
|
||||
_, raw_token = APIToken.create_token(self.user, name="Valid Token", expires_days=1)
|
||||
|
||||
response = self.client.get("/api/v1/compounds/", **self._auth_header(raw_token))
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_expired_token_rejected(self):
|
||||
token, raw_token = APIToken.create_token(self.user, name="Expired Token", expires_days=1)
|
||||
token.expires_at = timezone.now() - timedelta(days=1)
|
||||
token.save(update_fields=["expires_at"])
|
||||
|
||||
response = self.client.get("/api/v1/compounds/", **self._auth_header(raw_token))
|
||||
|
||||
self.assertEqual(response.status_code, 401)
|
||||
|
||||
def test_inactive_token_rejected(self):
|
||||
token, raw_token = APIToken.create_token(self.user, name="Inactive Token", expires_days=1)
|
||||
token.is_active = False
|
||||
token.save(update_fields=["is_active"])
|
||||
|
||||
response = self.client.get("/api/v1/compounds/", **self._auth_header(raw_token))
|
||||
|
||||
self.assertEqual(response.status_code, 401)
|
||||
|
||||
def test_invalid_token_rejected(self):
|
||||
response = self.client.get("/api/v1/compounds/", HTTP_AUTHORIZATION="Bearer invalid-token")
|
||||
|
||||
self.assertEqual(response.status_code, 401)
|
||||
|
||||
def test_no_token_rejected(self):
|
||||
self.client.logout()
|
||||
response = self.client.get("/api/v1/compounds/")
|
||||
|
||||
self.assertEqual(response.status_code, 401)
|
||||
|
||||
def test_bearer_populates_request_user_for_packages(self):
|
||||
response = self.client.get("/api/v1/packages/")
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
uuids = {item["uuid"] for item in payload["items"]}
|
||||
self.assertNotIn(str(self.unreviewed_package.uuid), uuids)
|
||||
|
||||
_, raw_token = APIToken.create_token(self.user, name="Package Token", expires_days=1)
|
||||
response = self.client.get("/api/v1/packages/", **self._auth_header(raw_token))
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
uuids = {item["uuid"] for item in payload["items"]}
|
||||
self.assertIn(str(self.unreviewed_package.uuid), uuids)
|
||||
|
||||
def test_session_auth_still_works_without_bearer(self):
|
||||
self.client.force_login(self.user)
|
||||
response = self.client.get("/api/v1/packages/")
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
payload = response.json()
|
||||
uuids = {item["uuid"] for item in payload["items"]}
|
||||
self.assertIn(str(self.unreviewed_package.uuid), uuids)
|
||||
0
epapi/utils/__init__.py
Normal file
0
epapi/utils/__init__.py
Normal file
181
epapi/utils/schema_transformers.py
Normal file
181
epapi/utils/schema_transformers.py
Normal file
@ -0,0 +1,181 @@
|
||||
"""
|
||||
Schema transformation utilities for converting Pydantic models to RJSF format.
|
||||
|
||||
This module provides functions to extract UI configuration from Pydantic models
|
||||
and transform them into React JSON Schema Form (RJSF) compatible format.
|
||||
"""
|
||||
|
||||
from typing import Type, Optional, Any
|
||||
|
||||
import jsonref
|
||||
from pydantic import BaseModel
|
||||
|
||||
from envipy_additional_information.ui_config import UIConfig
|
||||
from envipy_additional_information import registry
|
||||
|
||||
|
||||
def extract_groups(model_cls: Type[BaseModel]) -> list[str]:
|
||||
"""
|
||||
Extract groups from registry-stored group information.
|
||||
|
||||
Args:
|
||||
model_cls: The model class
|
||||
|
||||
Returns:
|
||||
List of group names the model belongs to
|
||||
"""
|
||||
return registry.get_groups(model_cls)
|
||||
|
||||
|
||||
def extract_ui_metadata(model_cls: Type[BaseModel]) -> dict[str, Any]:
    """
    Extract model-level UI metadata from the model's ``UI`` class.

    Only attributes that are NOT UIConfig instances are returned; common
    metadata includes: unit, description, title.
    """
    if not hasattr(model_cls, "UI"):
        return {}

    ui_class = model_cls.UI
    metadata: dict[str, Any] = {}

    for name in dir(ui_class):
        # Private/dunder attributes are implementation details, not metadata.
        if name.startswith("_"):
            continue

        try:
            value = getattr(ui_class, name)
        except AttributeError:
            continue

        # Methods are not metadata; nested classes/types are kept.
        if callable(value) and not isinstance(value, type):
            continue

        # UIConfig instances (including subclasses such as IntervalConfig)
        # are field-level configs, handled elsewhere — not model metadata.
        if isinstance(value, UIConfig):
            continue

        metadata[name] = value

    return metadata
|
||||
|
||||
|
||||
def extract_ui_config_from_model(model_cls: Type[BaseModel]) -> dict[str, Any]:
    """
    Extract UI configuration from the model's ``UI`` class.

    Returns a dictionary mapping field names to their UI schema
    configurations. Trusts the config classes to handle their own
    transformation logic.
    """
    if not hasattr(model_cls, "UI"):
        return {}

    ui_class = model_cls.UI
    properties = model_cls.model_json_schema().get("properties", {})

    configs: dict[str, Any] = {}
    for field_name in properties:
        # A field with no UI attribute is intentionally hidden from the UI;
        # anything that is not a UIConfig is likewise skipped.
        candidate = getattr(ui_class, field_name, None)
        if isinstance(candidate, UIConfig):
            configs[field_name] = candidate.to_ui_schema_field()

    return configs
|
||||
|
||||
|
||||
def build_ui_schema(model_cls: Type[BaseModel]) -> dict:
    """Generate RJSF uiSchema from model's UI class."""
    # Field-level configs already carry their final RJSF representation;
    # the uiSchema is simply their union keyed by field name.
    return dict(extract_ui_config_from_model(model_cls))
|
||||
|
||||
|
||||
def build_schema(model_cls: Type[BaseModel]) -> dict[str, Any]:
    """
    Build JSON schema from a Pydantic model, applying UI metadata.

    All $ref pointers are dereferenced so the frontend receives a fully
    inlined schema (enum values and nested properties resolved) without
    client-side ref resolution. Model-level metadata from the UI class
    (title, unit, etc.) is applied on top, making the UI metadata the
    single source of truth.
    """
    schema = model_cls.model_json_schema()

    # Inline every $ref ($defs included) for client-side rendering.
    # FIXME: This is a hack to get the schema to work with alpine schema-form.js replace once we migrate to client-side framework.
    schema = jsonref.replace_refs(schema, proxies=False)

    # With all refs inlined, the $defs section is dead weight.
    if "$defs" in schema:
        del schema["$defs"]

    # Expose UI metadata under x-* keys so it cannot clash with standard
    # JSON Schema properties.
    ui_metadata = extract_ui_metadata(model_cls)
    for key, value in ui_metadata.items():
        if value is not None:
            schema[f"x-{key}"] = value

    # Mirror the display name into the standard "title" property for
    # JSON Schema compliance; "title" wins over "label" when both exist.
    if "title" in ui_metadata:
        schema["title"] = ui_metadata["title"]
    elif "label" in ui_metadata:
        schema["title"] = ui_metadata["label"]

    return schema
|
||||
|
||||
|
||||
def build_rjsf_output(model_cls: Type[BaseModel], initial_data: Optional[dict] = None) -> dict:
    """
    Main function that returns complete RJSF format.

    Trusts the config classes to handle their own transformation logic;
    no special-case handling is performed here.

    Returns:
        dict with keys: schema, uiSchema, formData, groups
    """
    # Each component is produced by its dedicated builder; form data
    # defaults to an empty dict when no initial values were supplied.
    return {
        "schema": build_schema(model_cls),
        "uiSchema": build_ui_schema(model_cls),
        "formData": {} if initial_data is None else initial_data,
        "groups": extract_groups(model_cls),
    }
|
||||
82
epapi/utils/validation_errors.py
Normal file
82
epapi/utils/validation_errors.py
Normal file
@ -0,0 +1,82 @@
|
||||
"""Shared utilities for handling Pydantic validation errors."""
|
||||
|
||||
import json
|
||||
from pydantic import ValidationError
|
||||
from pydantic_core import ErrorDetails
|
||||
from ninja.errors import HttpError
|
||||
|
||||
|
||||
def format_validation_error(error: ErrorDetails) -> str:
    """Format a Pydantic validation error into a user-friendly message.

    Args:
        error: A Pydantic error details dictionary containing 'msg', 'type', 'ctx', etc.

    Returns:
        A user-friendly error message string.
    """
    msg = error.get("msg") or "Invalid value"
    error_type = error.get("type") or ""

    # Error types whose user-facing wording is fixed. The original code
    # spelled these out as duplicated elif branches (int_type/int_parsing
    # and float_type/float_parsing produced identical strings); a lookup
    # table keeps the messages in one place.
    fixed_messages = {
        "missing": "This field is required",
        "string_type": "Please enter a valid string",
        "int_type": "Please enter a valid int",
        "int_parsing": "Please enter a valid int",
        "float_type": "Please enter a valid float",
        "float_parsing": "Please enter a valid float",
    }
    if error_type in fixed_messages:
        return fixed_messages[error_type]

    if error_type == "enum":
        # Include the allowed values when Pydantic provides them in ctx.
        ctx = error.get("ctx", {})
        expected = ctx.get("expected", "") if ctx else ""
        return f"Please select a valid option{': ' + expected if expected else ''}"
    if error_type == "literal_error":
        # Literal errors (like Literal["active", "inactive"])
        return msg.replace("Input should be ", "Please enter ")
    if error_type == "value_error":
        # Strip "Value error, " prefix from custom validator messages
        return msg.replace("Value error, ", "")

    # Default: use the message from Pydantic but clean it up.
    return msg.replace("Input should be ", "Please enter ").replace("Value error, ", "")
|
||||
|
||||
|
||||
def handle_validation_error(e: ValidationError) -> None:
    """Convert a Pydantic ValidationError into a structured HttpError.

    Transforms Pydantic validation errors into the JSON structure the
    frontend expects for displaying field-level errors.

    Args:
        e: The Pydantic ValidationError to handle.

    Raises:
        HttpError: Always raises a 400 error with structured JSON containing
            type, field_errors, and message fields.
    """
    field_errors: dict[str, list[str]] = {}
    for detail in e.errors():
        # Last element of the location tuple is the field name; errors with
        # no location are grouped under "root".
        loc = detail.get("loc", ())
        field = str(loc[-1]) if loc else "root"
        field_errors.setdefault(field, []).append(format_validation_error(detail))

    # Structured payload the frontend parses for per-field display.
    raise HttpError(
        400,
        json.dumps(
            {
                "type": "validation_error",
                "field_errors": field_errors,
                "message": "Please correct the errors below",
            }
        ),
    )
|
||||
0
epapi/v1/__init__.py
Normal file
0
epapi/v1/__init__.py
Normal file
34
epapi/v1/auth.py
Normal file
34
epapi/v1/auth.py
Normal file
@ -0,0 +1,34 @@
|
||||
import hashlib
|
||||
|
||||
from ninja.security import HttpBearer
|
||||
from ninja.errors import HttpError
|
||||
|
||||
from epdb.models import APIToken
|
||||
|
||||
|
||||
class BearerTokenAuth(HttpBearer):
    """Strict Bearer auth: resolves the token to a user or raises 401."""

    def authenticate(self, request, token):
        # django-ninja passes None when no Authorization header is present;
        # returning None lets the framework treat the request as unauthenticated.
        if token is None:
            return None

        # Tokens are stored hashed; compare against the SHA-256 digest.
        digest = hashlib.sha256(token.encode()).hexdigest()
        user = APIToken.authenticate(digest, hashed=True)
        if not user:
            raise HttpError(401, "Invalid or expired token")

        # Attach the resolved user so downstream code sees request.user.
        request.user = user
        return user
|
||||
|
||||
|
||||
class OptionalBearerTokenAuth:
    """Bearer auth that allows unauthenticated access.

    Validates the Bearer token if present (401 on invalid token),
    otherwise lets the request through for anonymous/session access.
    """

    def __init__(self):
        # Delegate actual token validation to the strict implementation.
        self._bearer = BearerTokenAuth()

    def __call__(self, request):
        # A valid token wins; with no token, fall back to the session user
        # (possibly anonymous) so the endpoint decides what is visible.
        authenticated = self._bearer(request)
        if authenticated:
            return authenticated
        return request.user
|
||||
119
epapi/v1/dal.py
Normal file
119
epapi/v1/dal.py
Normal file
@ -0,0 +1,119 @@
|
||||
from django.db.models import Model
|
||||
from epdb.logic import PackageManager
|
||||
from epdb.models import CompoundStructure, User, Package, Compound, Scenario
|
||||
from uuid import UUID
|
||||
|
||||
from .errors import EPAPINotFoundError, EPAPIPermissionDeniedError
|
||||
|
||||
|
||||
def get_compound_for_read(user, compound_uuid: UUID):
    """
    Get compound by UUID with permission check.
    """
    try:
        compound = Compound.objects.get(uuid=compound_uuid)
        package = compound.package
    except Compound.DoesNotExist:
        raise EPAPINotFoundError(f"Compound with UUID {compound_uuid} not found")

    # FIXME: optimize package manager to exclusively work with UUIDs
    # Anonymous users are rejected outright; others need package read access.
    allowed = bool(user) and not user.is_anonymous and PackageManager.readable(user, package)
    if not allowed:
        raise EPAPIPermissionDeniedError("Insufficient permissions to access this compound.")

    return compound
|
||||
|
||||
|
||||
def get_package_for_read(user, package_uuid: UUID):
    """
    Get package by UUID with permission check.
    """

    # FIXME: update package manager with custom exceptions to avoid manual checks here
    try:
        package = Package.objects.get(uuid=package_uuid)
    except Package.DoesNotExist:
        raise EPAPINotFoundError(f"Package with UUID {package_uuid} not found")

    # FIXME: optimize package manager to exclusively work with UUIDs
    # Anonymous users are rejected outright; others need package read access.
    allowed = bool(user) and not user.is_anonymous and PackageManager.readable(user, package)
    if not allowed:
        raise EPAPIPermissionDeniedError("Insufficient permissions to access this package.")

    return package
|
||||
|
||||
|
||||
def get_scenario_for_read(user, scenario_uuid: UUID):
    """Get scenario by UUID with read permission check."""
    try:
        # select_related avoids a second query for the permission check below.
        scenario = Scenario.objects.select_related("package").get(uuid=scenario_uuid)
    except Scenario.DoesNotExist:
        raise EPAPINotFoundError(f"Scenario with UUID {scenario_uuid} not found")

    allowed = (
        bool(user) and not user.is_anonymous and PackageManager.readable(user, scenario.package)
    )
    if not allowed:
        raise EPAPIPermissionDeniedError("Insufficient permissions to access this scenario.")

    return scenario
|
||||
|
||||
|
||||
def get_scenario_for_write(user, scenario_uuid: UUID):
    """Get scenario by UUID with write permission check."""
    try:
        # select_related avoids a second query for the permission check below.
        scenario = Scenario.objects.select_related("package").get(uuid=scenario_uuid)
    except Scenario.DoesNotExist:
        raise EPAPINotFoundError(f"Scenario with UUID {scenario_uuid} not found")

    allowed = (
        bool(user) and not user.is_anonymous and PackageManager.writable(user, scenario.package)
    )
    if not allowed:
        raise EPAPIPermissionDeniedError("Insufficient permissions to modify this scenario.")

    return scenario
|
||||
|
||||
|
||||
def get_user_packages_for_read(user: User | None):
    """Get all packages readable by the user."""
    if user and not user.is_anonymous:
        return PackageManager.get_all_readable_packages(user, include_reviewed=True)
    # Anonymous visitors only see packages that passed review.
    return PackageManager.get_reviewed_packages()
|
||||
|
||||
|
||||
def get_user_entities_for_read(model_class: type[Model], user: User | None):
    """Build queryset of entities visible to the user.

    Anonymous users get entities from reviewed packages only; authenticated
    users get entities from every package they can read (reviewed included).
    """

    if not user or user.is_anonymous:
        return model_class.objects.filter(package__reviewed=True).select_related("package")

    # select_related("package") avoids per-row package queries downstream.
    qs = model_class.objects.filter(
        package__in=PackageManager.get_all_readable_packages(user, include_reviewed=True)
    ).select_related("package")
    return qs
|
||||
|
||||
|
||||
def get_package_entities_for_read(model_class: type[Model], package_uuid: UUID, user: User | None = None):
    """Build queryset for specific package entities.

    Delegates existence/permission checks to get_package_for_read, which
    raises EPAPINotFoundError / EPAPIPermissionDeniedError on failure.
    """
    package = get_package_for_read(user, package_uuid)
    qs = model_class.objects.filter(package=package).select_related("package")
    return qs
|
||||
|
||||
|
||||
def get_user_structure_for_read(user: User | None):
    """Build queryset for structures accessible to the user (via compound->package)."""
    if user and not user.is_anonymous:
        readable = PackageManager.get_all_readable_packages(user, include_reviewed=True)
        return CompoundStructure.objects.filter(
            compound__package__in=readable
        ).select_related("compound__package")

    # Anonymous access is limited to structures in reviewed packages.
    return CompoundStructure.objects.filter(
        compound__package__reviewed=True
    ).select_related("compound__package")
|
||||
|
||||
|
||||
def get_package_compound_structure_for_read(
    package_uuid: UUID, compound_uuid: UUID, user: User | None = None
):
    """Build queryset for specific package compound structures.

    Both lookups below raise EPAPINotFoundError / EPAPIPermissionDeniedError
    when the object is missing or not readable by the user.
    """

    get_package_for_read(user, package_uuid)
    compound = get_compound_for_read(user, compound_uuid)

    # NOTE(review): nothing verifies that `compound` actually belongs to the
    # package identified by `package_uuid` — confirm whether that is intended.
    qs = CompoundStructure.objects.filter(compound=compound).select_related("compound__package")
    return qs
|
||||
0
epapi/v1/endpoints/__init__.py
Normal file
0
epapi/v1/endpoints/__init__.py
Normal file
174
epapi/v1/endpoints/additional_information.py
Normal file
174
epapi/v1/endpoints/additional_information.py
Normal file
@ -0,0 +1,174 @@
|
||||
from ninja import Router, Body
|
||||
from ninja.errors import HttpError
|
||||
from uuid import UUID
|
||||
from pydantic import ValidationError
|
||||
from typing import Dict, Any
|
||||
import logging
|
||||
|
||||
from envipy_additional_information import registry
|
||||
from envipy_additional_information.groups import GroupEnum
|
||||
from epapi.utils.schema_transformers import build_rjsf_output
|
||||
from epapi.utils.validation_errors import handle_validation_error
|
||||
from ..dal import get_scenario_for_read, get_scenario_for_write
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = Router(tags=["Additional Information"])
|
||||
|
||||
|
||||
@router.get("/information/schema/")
def list_all_schemas(request):
    """Return all schemas in RJSF format with lowercase class names as keys."""
    result = {}
    for name, cls in registry.list_models().items():
        try:
            result[name] = build_rjsf_output(cls)
        except Exception as e:
            # A broken model must not take down the whole listing; log and skip.
            logger.warning(f"Failed to generate schema for {name}: {e}")
            continue
    return result
|
||||
|
||||
|
||||
@router.get("/information/schema/{model_name}/")
def get_model_schema(request, model_name: str):
    """Return RJSF schema for specific model.

    Raises:
        HttpError: 404 when the name is not registered.
    """
    # Registry keys are lowercase class names.
    cls = registry.get_model(model_name.lower())
    if not cls:
        raise HttpError(404, f"Unknown model: {model_name}")
    return build_rjsf_output(cls)
|
||||
|
||||
|
||||
@router.get("/scenario/{uuid:scenario_uuid}/information/")
def list_scenario_info(request, scenario_uuid: UUID):
    """List all additional information for a scenario"""
    scenario = get_scenario_for_read(request.user, scenario_uuid)

    # Serialize each entry with its concrete type name so clients can map
    # it back to the registry schema.
    return [
        {
            "type": type(ai).__name__,
            "uuid": getattr(ai, "uuid", None),
            "data": ai.model_dump(mode="json"),
        }
        for ai in scenario.get_additional_information()
    ]
|
||||
|
||||
|
||||
@router.post("/scenario/{uuid:scenario_uuid}/information/{model_name}/")
def add_scenario_info(
    request, scenario_uuid: UUID, model_name: str, payload: Dict[str, Any] = Body(...)
):
    """Add new additional information to scenario"""
    # Registry keys are lowercase class names.
    cls = registry.get_model(model_name.lower())
    if not cls:
        raise HttpError(404, f"Unknown model: {model_name}")

    try:
        instance = cls(**payload)  # Pydantic validates
    except ValidationError as e:
        # Raises HttpError(400) with structured field errors.
        handle_validation_error(e)

    # Write permission is checked only after the payload validates.
    scenario = get_scenario_for_write(request.user, scenario_uuid)

    # Model method now returns the UUID
    created_uuid = scenario.add_additional_information(instance)

    return {"status": "created", "uuid": created_uuid}
|
||||
|
||||
|
||||
@router.patch("/scenario/{uuid:scenario_uuid}/information/item/{uuid:ai_uuid}/")
def update_scenario_info(
    request, scenario_uuid: UUID, ai_uuid: UUID, payload: Dict[str, Any] = Body(...)
):
    """Update existing additional information for a scenario"""
    scenario = get_scenario_for_write(request.user, scenario_uuid)
    ai_uuid_str = str(ai_uuid)

    # The stored type name of the item tells us which model validates the payload.
    found_type = next(
        (
            type_name
            for type_name, items in scenario.additional_information.items()
            if any(item.get("uuid") == ai_uuid_str for item in items)
        ),
        None,
    )
    if found_type is None:
        raise HttpError(404, f"Additional information not found: {ai_uuid}")

    cls = registry.get_model(found_type.lower())
    if not cls:
        # Stored data references a type the registry no longer knows about.
        raise HttpError(500, f"Unknown model type in data: {found_type}")

    # Validate the payload against the model.
    try:
        instance = cls(**payload)
    except ValidationError as e:
        # Raises HttpError(400) with structured field errors.
        handle_validation_error(e)

    # Use the model method for the actual update.
    try:
        scenario.update_additional_information(ai_uuid_str, instance)
    except ValueError as e:
        raise HttpError(404, str(e))

    return {"status": "updated", "uuid": ai_uuid_str}
|
||||
|
||||
|
||||
@router.delete("/scenario/{uuid:scenario_uuid}/information/item/{uuid:ai_uuid}/")
def delete_scenario_info(request, scenario_uuid: UUID, ai_uuid: UUID):
    """Delete additional information from scenario"""
    scenario = get_scenario_for_write(request.user, scenario_uuid)

    try:
        scenario.remove_additional_information(str(ai_uuid))
    except ValueError as e:
        # The scenario method signals an unknown item UUID with ValueError.
        raise HttpError(404, str(e))

    return {"status": "deleted"}
|
||||
|
||||
|
||||
@router.get("/information/groups/")
def list_groups(request):
    """Return the list of available group names (GroupEnum values)."""
    return {"groups": GroupEnum.values()}
|
||||
|
||||
|
||||
@router.get("/information/groups/{group_name}/")
def get_group_models(request, group_name: str):
    """
    Return models for a specific group organized by subcategory.

    Args:
        group_name: One of "sludge", "soil", or "sediment" (string)

    Returns:
        Dictionary with subcategories (exp, spike, comp, misc, or group name)
        as keys and lists of model info as values
    """
    # Convert string to enum (raises ValueError if invalid).
    try:
        group_enum = GroupEnum(group_name)
    except ValueError:
        valid = ", ".join(GroupEnum.values())
        raise HttpError(400, f"Invalid group '{group_name}'. Valid: {valid}")

    try:
        group_data = registry.collect_group(group_enum)
    except (ValueError, TypeError) as e:
        raise HttpError(400, str(e))

    result = {}
    for subcategory, models in group_data.items():
        entries = []
        for cls in models:
            # Prefer the UI title when the model declares one.
            ui = getattr(cls, "UI", None)
            title = getattr(ui, "title", cls.__name__) if ui is not None else cls.__name__
            entries.append(
                {
                    "name": cls.__name__.lower(),
                    "class": cls.__name__,
                    "title": title,
                }
            )
        result[subcategory] = entries

    return result
|
||||
41
epapi/v1/endpoints/compounds.py
Normal file
41
epapi/v1/endpoints/compounds.py
Normal file
@ -0,0 +1,41 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
from uuid import UUID
|
||||
|
||||
from epdb.models import Compound
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import CompoundOutSchema, ReviewStatusFilter
|
||||
from ..dal import get_user_entities_for_read, get_package_entities_for_read
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get("/compounds/", response=EnhancedPageNumberPagination.Output[CompoundOutSchema])
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_all_compounds(request):
    """
    List all compounds visible to the user, ordered by name.

    Anonymous users see reviewed packages only; authenticated users also
    see compounds from packages they can read (via the DAL helper).
    """
    return get_user_entities_for_read(Compound, request.user).order_by("name").all()
|
||||
|
||||
|
||||
@router.get(
    "/package/{uuid:package_uuid}/compound/",
    response=EnhancedPageNumberPagination.Output[CompoundOutSchema],
)
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_package_compounds(request, package_uuid: UUID):
    """
    List all compounds for a specific package, ordered by name.

    The DAL helper raises not-found/permission errors when the package
    does not exist or is not readable by the user.
    """
    user = request.user
    return get_package_entities_for_read(Compound, package_uuid, user).order_by("name").all()
|
||||
41
epapi/v1/endpoints/models.py
Normal file
41
epapi/v1/endpoints/models.py
Normal file
@ -0,0 +1,41 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
from uuid import UUID
|
||||
|
||||
from epdb.models import EPModel
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import ModelOutSchema, ReviewStatusFilter
|
||||
from ..dal import get_user_entities_for_read, get_package_entities_for_read
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get("/models/", response=EnhancedPageNumberPagination.Output[ModelOutSchema])
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_all_models(request):
    """
    List all models visible to the user, ordered by name.

    Anonymous users see reviewed packages only; authenticated users also
    see models from packages they can read (via the DAL helper).
    """
    return get_user_entities_for_read(EPModel, request.user).order_by("name").all()
|
||||
|
||||
|
||||
@router.get(
    "/package/{uuid:package_uuid}/model/",
    response=EnhancedPageNumberPagination.Output[ModelOutSchema],
)
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_package_models(request, package_uuid: UUID):
    """
    List all models for a specific package, ordered by name.

    The DAL helper raises not-found/permission errors when the package
    does not exist or is not readable by the user.
    """
    user = request.user
    return get_package_entities_for_read(EPModel, package_uuid, user).order_by("name").all()
|
||||
32
epapi/v1/endpoints/packages.py
Normal file
32
epapi/v1/endpoints/packages.py
Normal file
@ -0,0 +1,32 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
import logging
|
||||
|
||||
from ..auth import OptionalBearerTokenAuth
|
||||
from ..dal import get_user_packages_for_read
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import PackageOutSchema, SelfReviewStatusFilter
|
||||
|
||||
router = Router()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get(
    "/packages/",
    response=EnhancedPageNumberPagination.Output[PackageOutSchema],
    auth=OptionalBearerTokenAuth(),
)
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=SelfReviewStatusFilter,
)
def list_all_packages(request):
    """
    List packages accessible to the user, ordered by name.

    Bearer-token auth is optional: a valid token scopes results to that
    user's readable packages; otherwise only reviewed packages are shown
    (see get_user_packages_for_read).
    """
    user = request.user
    qs = get_user_packages_for_read(user)
    return qs.order_by("name").all()
|
||||
42
epapi/v1/endpoints/pathways.py
Normal file
42
epapi/v1/endpoints/pathways.py
Normal file
@ -0,0 +1,42 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
from uuid import UUID
|
||||
|
||||
from epdb.models import Pathway
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import PathwayOutSchema, ReviewStatusFilter
|
||||
from ..dal import get_user_entities_for_read, get_package_entities_for_read
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get("/pathways/", response=EnhancedPageNumberPagination.Output[PathwayOutSchema])
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_all_pathways(request):
    """
    List all pathways visible to the user, ordered by name.

    Anonymous users see reviewed packages only; authenticated users also
    see pathways from packages they can read (via the DAL helper).
    """
    user = request.user
    return get_user_entities_for_read(Pathway, user).order_by("name").all()
|
||||
|
||||
|
||||
@router.get(
    "/package/{uuid:package_uuid}/pathway/",
    response=EnhancedPageNumberPagination.Output[PathwayOutSchema],
)
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_package_pathways(request, package_uuid: UUID):
    """
    List all pathways for a specific package, ordered by name.

    The DAL helper raises not-found/permission errors when the package
    does not exist or is not readable by the user.
    """
    user = request.user
    return get_package_entities_for_read(Pathway, package_uuid, user).order_by("name").all()
|
||||
42
epapi/v1/endpoints/reactions.py
Normal file
42
epapi/v1/endpoints/reactions.py
Normal file
@ -0,0 +1,42 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
from uuid import UUID
|
||||
|
||||
from epdb.models import Reaction
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import ReactionOutSchema, ReviewStatusFilter
|
||||
from ..dal import get_user_entities_for_read, get_package_entities_for_read
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get("/reactions/", response=EnhancedPageNumberPagination.Output[ReactionOutSchema])
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_all_reactions(request):
    """
    List all reactions visible to the user, ordered by name.

    Anonymous users see reviewed packages only; authenticated users also
    see reactions from packages they can read (via the DAL helper).
    """
    user = request.user
    return get_user_entities_for_read(Reaction, user).order_by("name").all()
|
||||
|
||||
|
||||
@router.get(
    "/package/{uuid:package_uuid}/reaction/",
    response=EnhancedPageNumberPagination.Output[ReactionOutSchema],
)
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_package_reactions(request, package_uuid: UUID):
    """
    List all reactions for a specific package, ordered by name.

    The DAL helper raises not-found/permission errors when the package
    does not exist or is not readable by the user.
    """
    user = request.user
    return get_package_entities_for_read(Reaction, package_uuid, user).order_by("name").all()
|
||||
42
epapi/v1/endpoints/rules.py
Normal file
42
epapi/v1/endpoints/rules.py
Normal file
@ -0,0 +1,42 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
from uuid import UUID
|
||||
|
||||
from epdb.models import Rule
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import ReviewStatusFilter, RuleOutSchema
|
||||
from ..dal import get_user_entities_for_read, get_package_entities_for_read
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get("/rules/", response=EnhancedPageNumberPagination.Output[RuleOutSchema])
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_all_rules(request):
    """
    List all rules visible to the user, ordered by name.

    Anonymous users see reviewed packages only; authenticated users also
    see rules from packages they can read (via the DAL helper).
    """
    user = request.user
    return get_user_entities_for_read(Rule, user).order_by("name").all()
|
||||
|
||||
|
||||
@router.get(
    "/package/{uuid:package_uuid}/rule/",
    response=EnhancedPageNumberPagination.Output[RuleOutSchema],
)
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ReviewStatusFilter,
)
def list_package_rules(request, package_uuid: UUID):
    """
    List all rules for a specific package, ordered by name.

    The DAL helper raises not-found/permission errors when the package
    does not exist or is not readable by the user.
    """
    user = request.user
    return get_package_entities_for_read(Rule, package_uuid, user).order_by("name").all()
|
||||
130
epapi/v1/endpoints/scenarios.py
Normal file
130
epapi/v1/endpoints/scenarios.py
Normal file
@ -0,0 +1,130 @@
|
||||
from django.conf import settings as s
|
||||
from django.db import IntegrityError, OperationalError, DatabaseError
|
||||
from ninja import Router, Body
|
||||
from ninja.errors import HttpError
|
||||
from ninja_extra.pagination import paginate
|
||||
from uuid import UUID
|
||||
from pydantic import ValidationError
|
||||
import logging
|
||||
import json
|
||||
|
||||
from epdb.models import Scenario
|
||||
from epdb.logic import PackageManager
|
||||
from epdb.views import _anonymous_or_real
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import (
|
||||
ScenarioOutSchema,
|
||||
ScenarioCreateSchema,
|
||||
ScenarioReviewStatusAndRelatedFilter,
|
||||
)
|
||||
from ..dal import get_user_entities_for_read, get_package_entities_for_read
|
||||
from envipy_additional_information import registry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get("/scenarios/", response=EnhancedPageNumberPagination.Output[ScenarioOutSchema])
@paginate(
    EnhancedPageNumberPagination,
    page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
    filter_schema=ScenarioReviewStatusAndRelatedFilter,
)
def list_all_scenarios(request):
    """List all scenarios visible to the user, ordered by name."""
    user = request.user
    items = get_user_entities_for_read(Scenario, user)
    return items.order_by("name").all()
|
||||
|
||||
|
||||
@router.get(
|
||||
"/package/{uuid:package_uuid}/scenario/",
|
||||
response=EnhancedPageNumberPagination.Output[ScenarioOutSchema],
|
||||
)
|
||||
@paginate(
|
||||
EnhancedPageNumberPagination,
|
||||
page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
|
||||
filter_schema=ScenarioReviewStatusAndRelatedFilter,
|
||||
)
|
||||
def list_package_scenarios(request, package_uuid: UUID):
|
||||
user = request.user
|
||||
items = get_package_entities_for_read(Scenario, package_uuid, user)
|
||||
return items.order_by("name").all()
|
||||
|
||||
|
||||
@router.post("/package/{uuid:package_uuid}/scenario/", response=ScenarioOutSchema)
|
||||
def create_scenario(request, package_uuid: UUID, payload: ScenarioCreateSchema = Body(...)):
|
||||
"""Create a new scenario with optional additional information."""
|
||||
user = _anonymous_or_real(request)
|
||||
|
||||
try:
|
||||
current_package = PackageManager.get_package_by_id(user, package_uuid)
|
||||
except ValueError as e:
|
||||
error_msg = str(e)
|
||||
if "does not exist" in error_msg:
|
||||
raise HttpError(404, f"Package not found: {package_uuid}")
|
||||
elif "Insufficient permissions" in error_msg:
|
||||
raise HttpError(403, "You do not have permission to access this package")
|
||||
else:
|
||||
logger.error(f"Unexpected ValueError from get_package_by_id: {error_msg}")
|
||||
raise HttpError(400, "Invalid package request")
|
||||
|
||||
# Build additional information models from payload
|
||||
additional_information_models = []
|
||||
validation_errors = []
|
||||
|
||||
for ai_item in payload.additional_information:
|
||||
# Get model class from registry
|
||||
model_cls = registry.get_model(ai_item.type.lower())
|
||||
if not model_cls:
|
||||
validation_errors.append(f"Unknown additional information type: {ai_item.type}")
|
||||
continue
|
||||
|
||||
try:
|
||||
# Validate and create model instance
|
||||
instance = model_cls(**ai_item.data)
|
||||
additional_information_models.append(instance)
|
||||
except ValidationError as e:
|
||||
# Collect validation errors to return to user
|
||||
error_messages = [err.get("msg", "Validation error") for err in e.errors()]
|
||||
validation_errors.append(f"{ai_item.type}: {', '.join(error_messages)}")
|
||||
except (TypeError, AttributeError, KeyError) as e:
|
||||
logger.warning(f"Failed to instantiate {ai_item.type} model: {str(e)}")
|
||||
validation_errors.append(f"{ai_item.type}: Invalid data structure - {str(e)}")
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error instantiating {ai_item.type}: {str(e)}")
|
||||
validation_errors.append(f"{ai_item.type}: Failed to process - please check your data")
|
||||
|
||||
# If there are validation errors, return them
|
||||
if validation_errors:
|
||||
raise HttpError(
|
||||
400,
|
||||
json.dumps(
|
||||
{
|
||||
"error": "Validation errors in additional information",
|
||||
"details": validation_errors,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# Create scenario using the existing Scenario.create method
|
||||
try:
|
||||
new_scenario = Scenario.create(
|
||||
package=current_package,
|
||||
name=payload.name,
|
||||
description=payload.description,
|
||||
scenario_date=payload.scenario_date,
|
||||
scenario_type=payload.scenario_type,
|
||||
additional_information=additional_information_models,
|
||||
)
|
||||
except IntegrityError as e:
|
||||
logger.error(f"Database integrity error creating scenario: {str(e)}")
|
||||
raise HttpError(400, "Scenario creation failed - data constraint violation")
|
||||
except OperationalError as e:
|
||||
logger.error(f"Database operational error creating scenario: {str(e)}")
|
||||
raise HttpError(503, "Database temporarily unavailable - please try again")
|
||||
except (DatabaseError, AttributeError) as e:
|
||||
logger.error(f"Error creating scenario: {str(e)}")
|
||||
raise HttpError(500, "Failed to create scenario due to database error")
|
||||
|
||||
return new_scenario
|
||||
23
epapi/v1/endpoints/settings.py
Normal file
23
epapi/v1/endpoints/settings.py
Normal file
@ -0,0 +1,23 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
|
||||
from epdb.logic import SettingManager
|
||||
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import SettingOutSchema
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get("/settings/", response=EnhancedPageNumberPagination.Output[SettingOutSchema])
|
||||
@paginate(
|
||||
EnhancedPageNumberPagination,
|
||||
page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
|
||||
)
|
||||
def list_all_pathways(request):
|
||||
"""
|
||||
List all pathways from reviewed packages.
|
||||
"""
|
||||
user = request.user
|
||||
return SettingManager.get_all_settings(user)
|
||||
50
epapi/v1/endpoints/structure.py
Normal file
50
epapi/v1/endpoints/structure.py
Normal file
@ -0,0 +1,50 @@
|
||||
from django.conf import settings as s
|
||||
from ninja import Router
|
||||
from ninja_extra.pagination import paginate
|
||||
from uuid import UUID
|
||||
|
||||
from ..pagination import EnhancedPageNumberPagination
|
||||
from ..schemas import CompoundStructureOutSchema, StructureReviewStatusFilter
|
||||
from ..dal import (
|
||||
get_user_structure_for_read,
|
||||
get_package_compound_structure_for_read,
|
||||
)
|
||||
|
||||
router = Router()
|
||||
|
||||
|
||||
@router.get(
|
||||
"/structures/", response=EnhancedPageNumberPagination.Output[CompoundStructureOutSchema]
|
||||
)
|
||||
@paginate(
|
||||
EnhancedPageNumberPagination,
|
||||
page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
|
||||
filter_schema=StructureReviewStatusFilter,
|
||||
)
|
||||
def list_all_structures(request):
|
||||
"""
|
||||
List all structures from all packages.
|
||||
"""
|
||||
user = request.user
|
||||
return get_user_structure_for_read(user).order_by("name").all()
|
||||
|
||||
|
||||
@router.get(
|
||||
"/package/{uuid:package_uuid}/compound/{uuid:compound_uuid}/structure/",
|
||||
response=EnhancedPageNumberPagination.Output[CompoundStructureOutSchema],
|
||||
)
|
||||
@paginate(
|
||||
EnhancedPageNumberPagination,
|
||||
page_size=s.API_PAGINATION_DEFAULT_PAGE_SIZE,
|
||||
filter_schema=StructureReviewStatusFilter,
|
||||
)
|
||||
def list_package_structures(request, package_uuid: UUID, compound_uuid: UUID):
|
||||
"""
|
||||
List all structures for a specific package and compound.
|
||||
"""
|
||||
user = request.user
|
||||
return (
|
||||
get_package_compound_structure_for_read(package_uuid, compound_uuid, user)
|
||||
.order_by("name")
|
||||
.all()
|
||||
)
|
||||
28
epapi/v1/errors.py
Normal file
28
epapi/v1/errors.py
Normal file
@ -0,0 +1,28 @@
|
||||
from ninja.errors import HttpError
|
||||
|
||||
|
||||
class EPAPIError(HttpError):
|
||||
status_code: int = 500
|
||||
|
||||
def __init__(self, message: str) -> None:
|
||||
super().__init__(status_code=self.status_code, message=message)
|
||||
|
||||
@classmethod
|
||||
def from_exception(cls, exc: Exception):
|
||||
return cls(message=str(exc))
|
||||
|
||||
|
||||
class EPAPIUnauthorizedError(EPAPIError):
|
||||
status_code = 401
|
||||
|
||||
|
||||
class EPAPIPermissionDeniedError(EPAPIError):
|
||||
status_code = 403
|
||||
|
||||
|
||||
class EPAPINotFoundError(EPAPIError):
|
||||
status_code = 404
|
||||
|
||||
|
||||
class EPAPIValidationError(EPAPIError):
|
||||
status_code = 422
|
||||
60
epapi/v1/pagination.py
Normal file
60
epapi/v1/pagination.py
Normal file
@ -0,0 +1,60 @@
|
||||
import math
|
||||
from typing import Any, Generic, List, TypeVar
|
||||
|
||||
from django.db.models import QuerySet
|
||||
from ninja import Schema
|
||||
from ninja.pagination import PageNumberPagination
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class EnhancedPageNumberPagination(PageNumberPagination):
|
||||
class Output(Schema, Generic[T]):
|
||||
items: List[T]
|
||||
page: int
|
||||
page_size: int
|
||||
total_items: int
|
||||
total_pages: int
|
||||
|
||||
def paginate_queryset(
|
||||
self,
|
||||
queryset: QuerySet,
|
||||
pagination: PageNumberPagination.Input,
|
||||
**params: Any,
|
||||
) -> Any:
|
||||
page_size = self._get_page_size(pagination.page_size)
|
||||
offset = (pagination.page - 1) * page_size
|
||||
total_items = self._items_count(queryset)
|
||||
total_pages = math.ceil(total_items / page_size) if page_size > 0 else 0
|
||||
|
||||
return {
|
||||
"items": queryset[offset : offset + page_size],
|
||||
"page": pagination.page,
|
||||
"page_size": page_size,
|
||||
"total_items": total_items,
|
||||
"total_pages": total_pages,
|
||||
}
|
||||
|
||||
async def apaginate_queryset(
|
||||
self,
|
||||
queryset: QuerySet,
|
||||
pagination: PageNumberPagination.Input,
|
||||
**params: Any,
|
||||
) -> Any:
|
||||
page_size = self._get_page_size(pagination.page_size)
|
||||
offset = (pagination.page - 1) * page_size
|
||||
total_items = await self._aitems_count(queryset)
|
||||
total_pages = math.ceil(total_items / page_size) if page_size > 0 else 0
|
||||
|
||||
if isinstance(queryset, QuerySet):
|
||||
items = [obj async for obj in queryset[offset : offset + page_size]]
|
||||
else:
|
||||
items = queryset[offset : offset + page_size]
|
||||
|
||||
return {
|
||||
"items": items,
|
||||
"page": pagination.page,
|
||||
"page_size": page_size,
|
||||
"total_items": total_items,
|
||||
"total_pages": total_pages,
|
||||
}
|
||||
36
epapi/v1/router.py
Normal file
36
epapi/v1/router.py
Normal file
@ -0,0 +1,36 @@
|
||||
from ninja import Router
|
||||
from ninja.security import SessionAuth
|
||||
|
||||
from .auth import BearerTokenAuth
|
||||
from .endpoints import (
|
||||
packages,
|
||||
scenarios,
|
||||
compounds,
|
||||
rules,
|
||||
reactions,
|
||||
pathways,
|
||||
models,
|
||||
structure,
|
||||
additional_information,
|
||||
settings,
|
||||
)
|
||||
|
||||
# Main router with authentication
|
||||
router = Router(
|
||||
auth=[
|
||||
SessionAuth(),
|
||||
BearerTokenAuth(),
|
||||
]
|
||||
)
|
||||
|
||||
# Include all endpoint routers
|
||||
router.add_router("", packages.router)
|
||||
router.add_router("", scenarios.router)
|
||||
router.add_router("", compounds.router)
|
||||
router.add_router("", rules.router)
|
||||
router.add_router("", reactions.router)
|
||||
router.add_router("", pathways.router)
|
||||
router.add_router("", models.router)
|
||||
router.add_router("", structure.router)
|
||||
router.add_router("", additional_information.router)
|
||||
router.add_router("", settings.router)
|
||||
134
epapi/v1/schemas.py
Normal file
134
epapi/v1/schemas.py
Normal file
@ -0,0 +1,134 @@
|
||||
from ninja import FilterSchema, FilterLookup, Schema
|
||||
from typing import Annotated, Optional, List, Dict, Any
|
||||
from uuid import UUID
|
||||
|
||||
|
||||
# Filter schema for query parameters
|
||||
class ReviewStatusFilter(FilterSchema):
|
||||
"""Filter schema for review_status query parameter."""
|
||||
|
||||
review_status: Annotated[Optional[bool], FilterLookup("package__reviewed")] = None
|
||||
|
||||
|
||||
class SelfReviewStatusFilter(FilterSchema):
|
||||
"""Filter schema for review_status query parameter on self-reviewed entities."""
|
||||
|
||||
review_status: Annotated[Optional[bool], FilterLookup("reviewed")] = None
|
||||
|
||||
|
||||
class StructureReviewStatusFilter(FilterSchema):
|
||||
"""Filter schema for review_status on structures (via compound->package)."""
|
||||
|
||||
review_status: Annotated[Optional[bool], FilterLookup("compound__package__reviewed")] = None
|
||||
|
||||
|
||||
class ScenarioReviewStatusAndRelatedFilter(ReviewStatusFilter):
|
||||
"""Filter schema for review_status and parent query parameter."""
|
||||
|
||||
exclude_related: Annotated[Optional[bool], FilterLookup("parent__isnull")] = None
|
||||
|
||||
|
||||
# Base schema for all package-scoped entities
|
||||
class PackageEntityOutSchema(Schema):
|
||||
"""Base schema for entities belonging to a package."""
|
||||
|
||||
uuid: UUID
|
||||
url: str = ""
|
||||
name: str
|
||||
description: str
|
||||
review_status: str = ""
|
||||
package: str = ""
|
||||
|
||||
@staticmethod
|
||||
def resolve_url(obj):
|
||||
return obj.url
|
||||
|
||||
@staticmethod
|
||||
def resolve_package(obj):
|
||||
return obj.package.url
|
||||
|
||||
@staticmethod
|
||||
def resolve_review_status(obj):
|
||||
return "reviewed" if obj.package.reviewed else "unreviewed"
|
||||
|
||||
|
||||
# All package-scoped entities inherit from base
|
||||
class ScenarioOutSchema(PackageEntityOutSchema):
|
||||
pass
|
||||
|
||||
|
||||
class AdditionalInformationItemSchema(Schema):
|
||||
"""Schema for additional information item in scenario creation."""
|
||||
|
||||
type: str
|
||||
data: Dict[str, Any]
|
||||
|
||||
|
||||
class ScenarioCreateSchema(Schema):
|
||||
"""Schema for creating a new scenario."""
|
||||
|
||||
name: str
|
||||
description: str = ""
|
||||
scenario_date: str = "No date"
|
||||
scenario_type: str = "Not specified"
|
||||
additional_information: List[AdditionalInformationItemSchema] = []
|
||||
|
||||
|
||||
class CompoundOutSchema(PackageEntityOutSchema):
|
||||
pass
|
||||
|
||||
|
||||
class RuleOutSchema(PackageEntityOutSchema):
|
||||
pass
|
||||
|
||||
|
||||
class ReactionOutSchema(PackageEntityOutSchema):
|
||||
pass
|
||||
|
||||
|
||||
class PathwayOutSchema(PackageEntityOutSchema):
|
||||
pass
|
||||
|
||||
|
||||
class ModelOutSchema(PackageEntityOutSchema):
|
||||
pass
|
||||
|
||||
|
||||
class CompoundStructureOutSchema(PackageEntityOutSchema):
|
||||
compound: str = ""
|
||||
|
||||
@staticmethod
|
||||
def resolve_compound(obj):
|
||||
return obj.compound.url
|
||||
|
||||
@staticmethod
|
||||
def resolve_package(obj):
|
||||
return obj.compound.package.url
|
||||
|
||||
@staticmethod
|
||||
def resolve_review_status(obj):
|
||||
return "reviewed" if obj.compound.package.reviewed else "unreviewed"
|
||||
|
||||
|
||||
# Package is special (no package FK)
|
||||
class PackageOutSchema(Schema):
|
||||
uuid: UUID
|
||||
url: str = ""
|
||||
name: str
|
||||
description: str
|
||||
review_status: str = ""
|
||||
|
||||
@staticmethod
|
||||
def resolve_url(obj):
|
||||
return obj.url
|
||||
|
||||
@staticmethod
|
||||
def resolve_review_status(obj):
|
||||
return "reviewed" if obj.reviewed else "unreviewed"
|
||||
|
||||
|
||||
class SettingOutSchema(Schema):
|
||||
uuid: UUID
|
||||
url: str = ""
|
||||
name: str
|
||||
description: str
|
||||
@ -1,31 +1,34 @@
|
||||
from django.conf import settings as s
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import (
|
||||
User,
|
||||
UserPackagePermission,
|
||||
Group,
|
||||
GroupPackagePermission,
|
||||
Package,
|
||||
MLRelativeReasoning,
|
||||
EnviFormer,
|
||||
Compound,
|
||||
CompoundStructure,
|
||||
SimpleAmbitRule,
|
||||
ParallelRule,
|
||||
Reaction,
|
||||
Pathway,
|
||||
Node,
|
||||
Edge,
|
||||
Scenario,
|
||||
Setting,
|
||||
EnviFormer,
|
||||
ExternalDatabase,
|
||||
ExternalIdentifier,
|
||||
Group,
|
||||
GroupPackagePermission,
|
||||
JobLog,
|
||||
License,
|
||||
MLRelativeReasoning,
|
||||
Node,
|
||||
ParallelRule,
|
||||
Pathway,
|
||||
Reaction,
|
||||
Scenario,
|
||||
Setting,
|
||||
SimpleAmbitRule,
|
||||
User,
|
||||
UserPackagePermission,
|
||||
)
|
||||
|
||||
Package = s.GET_PACKAGE_MODEL()
|
||||
|
||||
|
||||
class UserAdmin(admin.ModelAdmin):
|
||||
list_display = ["username", "email", "is_active"]
|
||||
list_display = ["username", "email", "is_active", "is_staff", "is_superuser"]
|
||||
|
||||
|
||||
class UserPackagePermissionAdmin(admin.ModelAdmin):
|
||||
@ -45,7 +48,7 @@ class JobLogAdmin(admin.ModelAdmin):
|
||||
|
||||
|
||||
class EPAdmin(admin.ModelAdmin):
|
||||
search_fields = ["name", "description"]
|
||||
search_fields = ["name", "description", "url", "uuid"]
|
||||
list_display = ["name", "url", "created"]
|
||||
ordering = ["-created"]
|
||||
|
||||
@ -62,6 +65,10 @@ class EnviFormerAdmin(EPAdmin):
|
||||
pass
|
||||
|
||||
|
||||
class LicenseAdmin(admin.ModelAdmin):
|
||||
list_display = ["cc_string", "link", "image_link"]
|
||||
|
||||
|
||||
class CompoundAdmin(EPAdmin):
|
||||
pass
|
||||
|
||||
@ -118,6 +125,7 @@ admin.site.register(JobLog, JobLogAdmin)
|
||||
admin.site.register(Package, PackageAdmin)
|
||||
admin.site.register(MLRelativeReasoning, MLRelativeReasoningAdmin)
|
||||
admin.site.register(EnviFormer, EnviFormerAdmin)
|
||||
admin.site.register(License, LicenseAdmin)
|
||||
admin.site.register(Compound, CompoundAdmin)
|
||||
admin.site.register(CompoundStructure, CompoundStructureAdmin)
|
||||
admin.site.register(SimpleAmbitRule, SimpleAmbitRuleAdmin)
|
||||
|
||||
14
epdb/api.py
14
epdb/api.py
@ -2,20 +2,12 @@ from typing import List
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from ninja import Router, Schema, Field
|
||||
from ninja.errors import HttpError
|
||||
from ninja.pagination import paginate
|
||||
from ninja.security import HttpBearer
|
||||
|
||||
from epapi.v1.auth import BearerTokenAuth
|
||||
|
||||
from .logic import PackageManager
|
||||
from .models import User, Compound, APIToken
|
||||
|
||||
|
||||
class BearerTokenAuth(HttpBearer):
|
||||
def authenticate(self, request, token):
|
||||
for token_obj in APIToken.objects.select_related("user").all():
|
||||
if token_obj.check_token(token) and token_obj.is_valid():
|
||||
return token_obj.user
|
||||
raise HttpError(401, "Invalid or expired token")
|
||||
from .models import User, Compound
|
||||
|
||||
|
||||
def _anonymous_or_real(request):
|
||||
|
||||
@ -1,4 +1,9 @@
|
||||
import logging
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EPDBConfig(AppConfig):
|
||||
@ -7,3 +12,6 @@ class EPDBConfig(AppConfig):
|
||||
|
||||
def ready(self):
|
||||
import epdb.signals # noqa: F401
|
||||
|
||||
model_name = getattr(settings, "EPDB_PACKAGE_MODEL", "epdb.Package")
|
||||
logger.info(f"Using Package model: {model_name}")
|
||||
|
||||
32
epdb/context_processors.py
Normal file
32
epdb/context_processors.py
Normal file
@ -0,0 +1,32 @@
|
||||
"""
|
||||
Context processors for enviPy application.
|
||||
|
||||
Context processors automatically make variables available to all templates.
|
||||
"""
|
||||
|
||||
from .logic import PackageManager
|
||||
from django.conf import settings as s
|
||||
|
||||
|
||||
def package_context(request):
|
||||
"""
|
||||
Provides package data for the search modal which is included globally
|
||||
in framework_modern.html.
|
||||
|
||||
Returns:
|
||||
dict: Context dictionary with reviewed and unreviewed packages
|
||||
"""
|
||||
current_user = request.user
|
||||
|
||||
reviewed_package_qs = PackageManager.get_reviewed_packages()
|
||||
|
||||
unreviewed_package_qs = s.GET_PACKAGE_MODEL().objects.none()
|
||||
|
||||
# Only get user-specific packages if user is authenticated
|
||||
if current_user.is_authenticated:
|
||||
unreviewed_package_qs = PackageManager.get_all_readable_packages(current_user)
|
||||
|
||||
return {
|
||||
"reviewed_packages": reviewed_package_qs,
|
||||
"unreviewed_packages": unreviewed_package_qs,
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
345
epdb/logic.py
345
epdb/logic.py
@ -1,38 +1,41 @@
|
||||
import re
|
||||
import logging
|
||||
import json
|
||||
from typing import Union, List, Optional, Set, Dict, Any
|
||||
import logging
|
||||
import re
|
||||
from typing import Any, Dict, List, Optional, Set, Union, Tuple
|
||||
from uuid import UUID
|
||||
|
||||
import nh3
|
||||
from django.conf import settings as s
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.db import transaction
|
||||
from django.conf import settings as s
|
||||
from pydantic import ValidationError
|
||||
|
||||
from epdb.models import (
|
||||
User,
|
||||
Package,
|
||||
UserPackagePermission,
|
||||
GroupPackagePermission,
|
||||
Permission,
|
||||
Group,
|
||||
Setting,
|
||||
EPModel,
|
||||
UserSettingPermission,
|
||||
Rule,
|
||||
Pathway,
|
||||
Node,
|
||||
Edge,
|
||||
Compound,
|
||||
Reaction,
|
||||
CompoundStructure,
|
||||
Edge,
|
||||
EnzymeLink,
|
||||
EPModel,
|
||||
ExpansionSchemeChoice,
|
||||
Group,
|
||||
GroupPackagePermission,
|
||||
Node,
|
||||
Pathway,
|
||||
Permission,
|
||||
Reaction,
|
||||
Rule,
|
||||
Setting,
|
||||
User,
|
||||
UserPackagePermission,
|
||||
UserSettingPermission,
|
||||
)
|
||||
from utilities.chem import FormatConverter
|
||||
from utilities.misc import PackageImporter, PackageExporter
|
||||
from utilities.misc import PackageExporter, PackageImporter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
Package = s.GET_PACKAGE_MODEL()
|
||||
|
||||
|
||||
class EPDBURLParser:
|
||||
UUID_PATTERN = r"[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}"
|
||||
@ -185,8 +188,12 @@ class UserManager(object):
|
||||
def create_user(
|
||||
username, email, password, set_setting=True, add_to_group=True, *args, **kwargs
|
||||
):
|
||||
# avoid circular import :S
|
||||
from .tasks import send_registration_mail
|
||||
# Clean for potential XSS
|
||||
clean_username = nh3.clean(username).strip()
|
||||
clean_email = nh3.clean(email).strip()
|
||||
if clean_username != username or clean_email != email:
|
||||
# This will be caught by the try in view.py/register
|
||||
raise ValueError("Invalid username or password")
|
||||
|
||||
extra_fields = {"is_active": not s.ADMIN_APPROVAL_REQUIRED}
|
||||
|
||||
@ -205,10 +212,6 @@ class UserManager(object):
|
||||
u.default_package = p
|
||||
u.save()
|
||||
|
||||
if not u.is_active:
|
||||
# send email for verification
|
||||
send_registration_mail.delay(u.pk)
|
||||
|
||||
if set_setting:
|
||||
u.default_setting = Setting.objects.get(global_default=True)
|
||||
u.save()
|
||||
@ -262,8 +265,9 @@ class GroupManager(object):
|
||||
@staticmethod
|
||||
def create_group(current_user, name, description):
|
||||
g = Group()
|
||||
g.name = name
|
||||
g.description = description
|
||||
# Clean for potential XSS
|
||||
g.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
|
||||
g.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()
|
||||
g.owner = current_user
|
||||
g.save()
|
||||
|
||||
@ -434,6 +438,7 @@ class PackageManager(object):
|
||||
if PackageManager.readable(user, p):
|
||||
return p
|
||||
else:
|
||||
# FIXME: use custom exception to be translatable to 403 in API
|
||||
raise ValueError(
|
||||
"Insufficient permissions to access Package with ID {}".format(package_id)
|
||||
)
|
||||
@ -518,8 +523,13 @@ class PackageManager(object):
|
||||
@transaction.atomic
|
||||
def create_package(current_user, name: str, description: str = None):
|
||||
p = Package()
|
||||
p.name = name
|
||||
p.description = description
|
||||
|
||||
# Clean for potential XSS
|
||||
p.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
|
||||
|
||||
if description is not None and description.strip() != "":
|
||||
p.description = nh3.clean(description.strip(), tags=s.ALLOWED_HTML_TAGS).strip()
|
||||
|
||||
p.save()
|
||||
|
||||
up = UserPackagePermission()
|
||||
@ -565,30 +575,39 @@ class PackageManager(object):
|
||||
else:
|
||||
_ = perm_cls.objects.update_or_create(defaults={"permission": new_perm}, **data)
|
||||
|
||||
@staticmethod
|
||||
def grant_read(caller: User, package: Package, grantee: Union[User, Group]):
|
||||
PackageManager.update_permissions(caller, package, grantee, Permission.READ[0])
|
||||
|
||||
@staticmethod
|
||||
def grant_write(caller: User, package: Package, grantee: Union[User, Group]):
|
||||
PackageManager.update_permissions(caller, package, grantee, Permission.WRITE[0])
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def import_legacy_package(
|
||||
data: dict, owner: User, keep_ids=False, add_import_timestamp=True, trust_reviewed=False
|
||||
):
|
||||
from uuid import UUID, uuid4
|
||||
from datetime import datetime
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from envipy_additional_information import AdditionalInformationConverter
|
||||
|
||||
from .models import (
|
||||
Package,
|
||||
Compound,
|
||||
CompoundStructure,
|
||||
SimpleRule,
|
||||
SimpleAmbitRule,
|
||||
Edge,
|
||||
Node,
|
||||
ParallelRule,
|
||||
Pathway,
|
||||
Reaction,
|
||||
Scenario,
|
||||
SequentialRule,
|
||||
SequentialRuleOrdering,
|
||||
Reaction,
|
||||
Pathway,
|
||||
Node,
|
||||
Edge,
|
||||
Scenario,
|
||||
SimpleAmbitRule,
|
||||
SimpleRule,
|
||||
)
|
||||
from envipy_additional_information import AdditionalInformationConverter
|
||||
|
||||
pack = Package()
|
||||
pack.uuid = UUID(data["id"].split("/")[-1]) if keep_ids else uuid4()
|
||||
@ -660,7 +679,7 @@ class PackageManager(object):
|
||||
ai_data = json.loads(res.model_dump_json())
|
||||
ai_data["uuid"] = f"{uuid4()}"
|
||||
new_add_inf[res_cls_name].append(ai_data)
|
||||
except ValidationError:
|
||||
except (ValidationError, ValueError):
|
||||
logger.error(f"Failed to convert {name} with {addinf_data}")
|
||||
|
||||
scen.additional_information = new_add_inf
|
||||
@ -1093,29 +1112,31 @@ class SettingManager(object):
|
||||
rule_packages: List[Package] = None,
|
||||
model: EPModel = None,
|
||||
model_threshold: float = None,
|
||||
expansion_scheme: ExpansionSchemeChoice = ExpansionSchemeChoice.BFS,
|
||||
):
|
||||
s = Setting()
|
||||
s.name = name
|
||||
s.description = description
|
||||
s.max_nodes = max_nodes
|
||||
s.max_depth = max_depth
|
||||
s.model = model
|
||||
s.model_threshold = model_threshold
|
||||
new_s = Setting()
|
||||
# Clean for potential XSS
|
||||
new_s.name = nh3.clean(name, tags=s.ALLOWED_HTML_TAGS).strip()
|
||||
new_s.description = nh3.clean(description, tags=s.ALLOWED_HTML_TAGS).strip()
|
||||
new_s.max_nodes = max_nodes
|
||||
new_s.max_depth = max_depth
|
||||
new_s.model = model
|
||||
new_s.model_threshold = model_threshold
|
||||
|
||||
s.save()
|
||||
new_s.save()
|
||||
|
||||
if rule_packages is not None:
|
||||
for r in rule_packages:
|
||||
s.rule_packages.add(r)
|
||||
s.save()
|
||||
new_s.rule_packages.add(r)
|
||||
new_s.save()
|
||||
|
||||
usp = UserSettingPermission()
|
||||
usp.user = user
|
||||
usp.setting = s
|
||||
usp.setting = new_s
|
||||
usp.permission = Permission.ALL[0]
|
||||
usp.save()
|
||||
|
||||
return s
|
||||
return new_s
|
||||
|
||||
@staticmethod
|
||||
def get_default_setting(user: User):
|
||||
@ -1374,6 +1395,9 @@ class SEdge(object):
|
||||
self.rule = rule
|
||||
self.probability = probability
|
||||
|
||||
def product_smiles(self):
|
||||
return [p.smiles for p in self.products]
|
||||
|
||||
def __hash__(self):
|
||||
full_hash = 0
|
||||
|
||||
@ -1459,6 +1483,7 @@ class SPathway(object):
|
||||
self.smiles_to_node: Dict[str, SNode] = dict(**{n.smiles: n for n in self.root_nodes})
|
||||
self.edges: Set["SEdge"] = set()
|
||||
self.done = False
|
||||
self.empty_due_to_threshold = False
|
||||
|
||||
@staticmethod
|
||||
def from_pathway(pw: "Pathway", persist: bool = True):
|
||||
@ -1523,33 +1548,45 @@ class SPathway(object):
|
||||
|
||||
return sorted(res, key=lambda x: hash(x))
|
||||
|
||||
def predict_step(self, from_depth: int = None, from_node: "Node" = None):
|
||||
substrates: List[SNode] = []
|
||||
def _expand(self, substrates: List[SNode]) -> Tuple[List[SNode], List[SEdge]]:
|
||||
"""
|
||||
Expands the given substrates by generating new nodes and edges based on prediction settings.
|
||||
|
||||
if from_depth is not None:
|
||||
substrates = self._get_nodes_for_depth(from_depth)
|
||||
elif from_node is not None:
|
||||
for k, v in self.snode_persist_lookup.items():
|
||||
if from_node == v:
|
||||
substrates = [k]
|
||||
break
|
||||
else:
|
||||
raise ValueError("Neither from_depth nor from_node_url specified")
|
||||
This method processes a list of substrates and expands them into new nodes and edges using defined
|
||||
rules and settings. It evaluates each substrate to determine its applicability domain, persists
|
||||
domain assessments, and generates candidates for further processing. Newly created nodes and edges
|
||||
are returned, and any applicable information is stored or updated internally during the process.
|
||||
|
||||
Parameters:
|
||||
substrates (List[SNode]): A list of substrate nodes to be expanded.
|
||||
|
||||
Returns:
|
||||
Tuple[List[SNode], List[SEdge]]:
|
||||
A tuple containing:
|
||||
- A list of new nodes generated during the expansion.
|
||||
- A list of new edges representing connections between nodes based on candidate reactions.
|
||||
|
||||
Raises:
|
||||
ValueError: If a node does not have an ID when it should have been saved already.
|
||||
"""
|
||||
new_nodes: List[SNode] = []
|
||||
new_edges: List[SEdge] = []
|
||||
|
||||
new_tp = False
|
||||
if substrates:
|
||||
for sub in substrates:
|
||||
# For App Domain we have to ensure that each Node is evaluated
|
||||
if sub.app_domain_assessment is None:
|
||||
if self.prediction_setting.model:
|
||||
if self.prediction_setting.model.app_domain:
|
||||
app_domain_assessment = self.prediction_setting.model.app_domain.assess(sub.smiles)
|
||||
app_domain_assessment = self.prediction_setting.model.app_domain.assess(
|
||||
sub.smiles
|
||||
)
|
||||
|
||||
if self.persist is not None:
|
||||
n = self.snode_persist_lookup[sub]
|
||||
|
||||
assert n.id is not None, (
|
||||
"Node has no id! Should have been saved already... aborting!"
|
||||
)
|
||||
if n.id is None:
|
||||
raise ValueError(f"Node {n} has no ID... aborting!")
|
||||
|
||||
node_data = n.simple_json()
|
||||
node_data["image"] = f"{n.url}?image=svg"
|
||||
app_domain_assessment["assessment"]["node"] = node_data
|
||||
@ -1559,11 +1596,25 @@ class SPathway(object):
|
||||
|
||||
sub.app_domain_assessment = app_domain_assessment
|
||||
|
||||
candidates = self.prediction_setting.expand(self, sub)
|
||||
expansion_result = self.prediction_setting.expand(self, sub)
|
||||
|
||||
# We don't have any substrate, but technically we have at least one rule that triggered.
|
||||
# If our substrate is a root node a.k.a. depth == 0 store that info in SPathway
|
||||
if (
|
||||
len(expansion_result["transformations"]) == 0
|
||||
and expansion_result["rule_triggered"]
|
||||
and sub.depth == 0
|
||||
):
|
||||
self.empty_due_to_threshold = True
|
||||
|
||||
# Emit directly
|
||||
if self.persist is not None:
|
||||
self.persist.kv["empty_due_to_threshold"] = True
|
||||
self.persist.save()
|
||||
|
||||
# candidates is a List of PredictionResult. The length of the List is equal to the number of rules
|
||||
for cand_set in candidates:
|
||||
for cand_set in expansion_result["transformations"]:
|
||||
if cand_set:
|
||||
new_tp = True
|
||||
# cand_set is a PredictionResult object that can consist of multiple candidate reactions
|
||||
for cand in cand_set:
|
||||
cand_nodes = []
|
||||
@ -1574,11 +1625,12 @@ class SPathway(object):
|
||||
app_domain_assessment = None
|
||||
if self.prediction_setting.model:
|
||||
if self.prediction_setting.model.app_domain:
|
||||
app_domain_assessment = (self.prediction_setting.model.app_domain.assess(c))
|
||||
|
||||
self.smiles_to_node[c] = SNode(
|
||||
c, sub.depth + 1, app_domain_assessment
|
||||
app_domain_assessment = (
|
||||
self.prediction_setting.model.app_domain.assess(c)
|
||||
)
|
||||
snode = SNode(c, sub.depth + 1, app_domain_assessment)
|
||||
self.smiles_to_node[c] = snode
|
||||
new_nodes.append(snode)
|
||||
|
||||
node = self.smiles_to_node[c]
|
||||
cand_nodes.append(node)
|
||||
@ -1590,6 +1642,132 @@ class SPathway(object):
|
||||
probability=cand_set.probability,
|
||||
)
|
||||
self.edges.add(edge)
|
||||
new_edges.append(edge)
|
||||
|
||||
return new_nodes, new_edges
|
||||
|
||||
def predict(self):
|
||||
"""
|
||||
Predicts outcomes based on a graph traversal algorithm using the specified expansion schema.
|
||||
|
||||
This method iteratively explores the nodes of a graph starting from the root nodes, propagating
|
||||
probabilities through edges, and updating the probabilities of the connected nodes. The traversal
|
||||
can follow one of three predefined expansion schemas: Depth-First Search (DFS), Breadth-First Search
|
||||
(BFS), or a Greedy approach based on node probabilities. The methodology ensures that all reachable
|
||||
nodes are processed systematically according to the specified schema.
|
||||
|
||||
Errors will be raised if the expansion schema is undefined or invalid. Additionally, this method
|
||||
supports persisting changes by writing back data to the database when configured to do so.
|
||||
|
||||
Attributes
|
||||
----------
|
||||
done : bool
|
||||
A flag indicating whether the prediction process is completed.
|
||||
persist : Any
|
||||
An optional object that manages persistence operations for saving modifications.
|
||||
root_nodes : List[SNode]
|
||||
A collection of initial nodes in the graph from which traversal begins.
|
||||
prediction_setting : Any
|
||||
Configuration object specifying settings for graph traversal, such as the choice of
|
||||
expansion schema.
|
||||
|
||||
Raises
|
||||
------
|
||||
ValueError
|
||||
If an invalid or unknown expansion schema is provided in `prediction_setting`.
|
||||
"""
|
||||
# populate initial queue
|
||||
queue = list(self.root_nodes)
|
||||
processed = set()
|
||||
|
||||
# initial nodes have prob 1.0
|
||||
node_probs: Dict[SNode, float] = {}
|
||||
node_probs.update({n: 1.0 for n in queue})
|
||||
|
||||
while queue:
|
||||
current = queue.pop(0)
|
||||
|
||||
if current in processed:
|
||||
continue
|
||||
|
||||
processed.add(current)
|
||||
|
||||
new_nodes, new_edges = self._expand([current])
|
||||
|
||||
if new_nodes or new_edges:
|
||||
# Check if we need to write back data to the database
|
||||
if self.persist:
|
||||
self._sync_to_pathway()
|
||||
# call save to update the internal modified field
|
||||
self.persist.save()
|
||||
|
||||
if new_nodes:
|
||||
for edge in new_edges:
|
||||
# All edge have `current` as educt
|
||||
# Use `current` and adjust probs
|
||||
current_prob = node_probs[current]
|
||||
|
||||
for prod in edge.products:
|
||||
# Either is a new product or a product and we found a path with a higher prob
|
||||
if (
|
||||
prod not in node_probs
|
||||
or current_prob * edge.probability > node_probs[prod]
|
||||
):
|
||||
node_probs[prod] = current_prob * edge.probability
|
||||
|
||||
# Update Queue to proceed
|
||||
if self.prediction_setting.expansion_scheme == "DFS":
|
||||
for n in new_nodes:
|
||||
if n not in processed:
|
||||
# We want to follow this path -> prepend queue
|
||||
queue.insert(0, n)
|
||||
elif self.prediction_setting.expansion_scheme == "BFS":
|
||||
for n in new_nodes:
|
||||
if n not in processed:
|
||||
# Add at the end, everything queued before will be processed
|
||||
# before new_nodese
|
||||
queue.append(n)
|
||||
elif self.prediction_setting.expansion_scheme == "GREEDY":
|
||||
# Simply add them, as we will re-order the queue later
|
||||
for n in new_nodes:
|
||||
if n not in processed:
|
||||
queue.append(n)
|
||||
|
||||
node_and_probs = []
|
||||
for queued_val in queue:
|
||||
node_and_probs.append((queued_val, node_probs[queued_val]))
|
||||
|
||||
# re-order the queue and only pick smiles
|
||||
queue = [
|
||||
n[0] for n in sorted(node_and_probs, key=lambda x: x[1], reverse=True)
|
||||
]
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Unknown expansion schema: {self.prediction_setting.expansion_scheme}"
|
||||
)
|
||||
|
||||
# Queue exhausted, we're done
|
||||
self.done = True
|
||||
|
||||
def predict_step(self, from_depth: int = None, from_node: "Node" = None):
|
||||
substrates: List[SNode] = []
|
||||
|
||||
if from_depth is not None:
|
||||
substrates = self._get_nodes_for_depth(from_depth)
|
||||
elif from_node is not None:
|
||||
for k, v in self.snode_persist_lookup.items():
|
||||
if from_node == v:
|
||||
substrates = [k]
|
||||
break
|
||||
else:
|
||||
raise ValueError(f"Node {from_node} not found in SPathway!")
|
||||
else:
|
||||
raise ValueError("Neither from_depth nor from_node_url specified")
|
||||
|
||||
new_tp = False
|
||||
if substrates:
|
||||
new_nodes, _ = self._expand(substrates)
|
||||
new_tp = len(new_nodes) > 0
|
||||
|
||||
# In case no substrates are found, we're done.
|
||||
# For "predict from node" we're always done
|
||||
@ -1602,6 +1780,14 @@ class SPathway(object):
|
||||
# call save to update the internal modified field
|
||||
self.persist.save()
|
||||
|
||||
def get_edge_for_educt_smiles(self, smiles: str) -> List[SEdge]:
|
||||
res = []
|
||||
for e in self.edges:
|
||||
for n in e.educts:
|
||||
if n.smiles == smiles:
|
||||
res.append(e)
|
||||
return res
|
||||
|
||||
def _sync_to_pathway(self) -> None:
|
||||
logger.info("Updating Pathway with SPathway")
|
||||
|
||||
@ -1665,11 +1851,6 @@ class SPathway(object):
|
||||
"to": to_indices,
|
||||
}
|
||||
|
||||
# if edge.rule:
|
||||
# e['rule'] = {
|
||||
# 'name': edge.rule.name,
|
||||
# 'id': edge.rule.url,
|
||||
# }
|
||||
edges.append(e)
|
||||
|
||||
return {
|
||||
|
||||
@ -8,14 +8,19 @@ from epdb.logic import UserManager, GroupManager, PackageManager, SettingManager
|
||||
from epdb.models import (
|
||||
UserSettingPermission,
|
||||
MLRelativeReasoning,
|
||||
EnviFormer,
|
||||
Permission,
|
||||
User,
|
||||
ExternalDatabase,
|
||||
License,
|
||||
)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"-ol", "--only-licenses", action="store_true", help="Only create licenses."
|
||||
)
|
||||
|
||||
def create_users(self):
|
||||
# Anonymous User
|
||||
if not User.objects.filter(email="anon@envipath.com").exists():
|
||||
@ -83,6 +88,17 @@ class Command(BaseCommand):
|
||||
|
||||
return anon, admin, g, user0
|
||||
|
||||
def create_licenses(self):
|
||||
"""Create the six default licenses supported by enviPath"""
|
||||
cc_strings = ["by", "by-nc", "by-nc-nd", "by-nc-sa", "by-nd", "by-sa"]
|
||||
for cc_string in cc_strings:
|
||||
if not License.objects.filter(cc_string=cc_string).exists():
|
||||
new_license = License()
|
||||
new_license.cc_string = cc_string
|
||||
new_license.link = f"https://creativecommons.org/licenses/{cc_string}/4.0/"
|
||||
new_license.image_link = f"https://licensebuttons.net/l/{cc_string}/4.0/88x31.png"
|
||||
new_license.save()
|
||||
|
||||
def import_package(self, data, owner):
|
||||
return PackageManager.import_legacy_package(
|
||||
data, owner, keep_ids=True, add_import_timestamp=False, trust_reviewed=True
|
||||
@ -157,6 +173,10 @@ class Command(BaseCommand):
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, *args, **options):
|
||||
# Create licenses
|
||||
self.create_licenses()
|
||||
if options.get("only_licenses", False):
|
||||
return
|
||||
# Create users
|
||||
anon, admin, g, user0 = self.create_users()
|
||||
|
||||
@ -210,7 +230,6 @@ class Command(BaseCommand):
|
||||
package=pack,
|
||||
rule_packages=[mapping["EAWAG-BBD"]],
|
||||
data_packages=[mapping["EAWAG-BBD"]],
|
||||
eval_packages=[],
|
||||
threshold=0.5,
|
||||
name="ECC - BBD - T0.5",
|
||||
description="ML Relative Reasoning",
|
||||
@ -218,7 +237,3 @@ class Command(BaseCommand):
|
||||
|
||||
ml_model.build_dataset()
|
||||
ml_model.build_model()
|
||||
|
||||
# If available, create EnviFormerModel
|
||||
if s.ENVIFORMER_PRESENT:
|
||||
EnviFormer.create(pack, "EnviFormer - T0.5", "EnviFormer Model with Threshold 0.5", 0.5)
|
||||
|
||||
92
epdb/management/commands/create_api_token.py
Normal file
92
epdb/management/commands/create_api_token.py
Normal file
@ -0,0 +1,92 @@
|
||||
from django.conf import settings as s
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
from epdb.models import APIToken
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Create an API token for a user"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--username",
|
||||
required=True,
|
||||
help="Username of the user who will own the token",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--name",
|
||||
required=True,
|
||||
help="Descriptive name for the token",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--expires-days",
|
||||
type=int,
|
||||
default=90,
|
||||
help="Days until expiration (0 for no expiration)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--inactive",
|
||||
action="store_true",
|
||||
help="Create the token as inactive",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--curl",
|
||||
action="store_true",
|
||||
help="Print a curl example using the token",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--base-url",
|
||||
default=None,
|
||||
help="Base URL for curl example (default SERVER_URL or http://localhost:8000)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--endpoint",
|
||||
default="/api/v1/compounds/",
|
||||
help="Endpoint path for curl example",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
username = options["username"]
|
||||
name = options["name"]
|
||||
expires_days = options["expires_days"]
|
||||
|
||||
if expires_days < 0:
|
||||
raise CommandError("--expires-days must be >= 0")
|
||||
|
||||
if expires_days == 0:
|
||||
expires_days = None
|
||||
|
||||
user_model = get_user_model()
|
||||
try:
|
||||
user = user_model.objects.get(username=username)
|
||||
except user_model.DoesNotExist as exc:
|
||||
raise CommandError(f"User not found for username '{username}'") from exc
|
||||
|
||||
token, raw_token = APIToken.create_token(user, name=name, expires_days=expires_days)
|
||||
|
||||
if options["inactive"]:
|
||||
token.is_active = False
|
||||
token.save(update_fields=["is_active"])
|
||||
|
||||
self.stdout.write(f"User: {user.username} ({user.email})")
|
||||
self.stdout.write(f"Token name: {token.name}")
|
||||
self.stdout.write(f"Token id: {token.id}")
|
||||
if token.expires_at:
|
||||
self.stdout.write(f"Expires at: {token.expires_at.isoformat()}")
|
||||
else:
|
||||
self.stdout.write("Expires at: never")
|
||||
self.stdout.write(f"Active: {token.is_active}")
|
||||
self.stdout.write("Raw token:")
|
||||
self.stdout.write(raw_token)
|
||||
|
||||
if options["curl"]:
|
||||
base_url = (
|
||||
options["base_url"] or getattr(s, "SERVER_URL", None) or "http://localhost:8000"
|
||||
)
|
||||
endpoint = options["endpoint"]
|
||||
endpoint = endpoint if endpoint.startswith("/") else f"/{endpoint}"
|
||||
url = f"{base_url.rstrip('/')}{endpoint}"
|
||||
curl_cmd = f'curl -H "Authorization: Bearer {raw_token}" "{url}"'
|
||||
self.stdout.write("Curl:")
|
||||
self.stdout.write(curl_cmd)
|
||||
@ -2,7 +2,9 @@ from django.conf import settings as s
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
from epdb.models import MLRelativeReasoning, EnviFormer, Package
|
||||
from epdb.models import EnviFormer, MLRelativeReasoning
|
||||
|
||||
Package = s.GET_PACKAGE_MODEL()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@ -75,11 +77,13 @@ class Command(BaseCommand):
|
||||
return packages
|
||||
|
||||
# Iteratively create models in options["model_names"]
|
||||
print(f"Creating models: {options['model_names']}\n"
|
||||
print(
|
||||
f"Creating models: {options['model_names']}\n"
|
||||
f"Data packages: {options['data_packages']}\n"
|
||||
f"Rule Packages (only for MLRR): {options['rule_packages']}\n"
|
||||
f"Eval Packages: {options['eval_packages']}\n"
|
||||
f"Threshold: {options['threshold']:.2f}")
|
||||
f"Threshold: {options['threshold']:.2f}"
|
||||
)
|
||||
data_packages = decode_packages(options["data_packages"])
|
||||
eval_packages = decode_packages(options["eval_packages"])
|
||||
rule_packages = decode_packages(options["rule_packages"])
|
||||
@ -89,8 +93,7 @@ class Command(BaseCommand):
|
||||
model = EnviFormer.create(
|
||||
pack,
|
||||
data_packages=data_packages,
|
||||
eval_packages=eval_packages,
|
||||
threshold=options['threshold'],
|
||||
threshold=options["threshold"],
|
||||
name=f"EnviFormer - {', '.join(options['data_packages'])} - T{options['threshold']:.2f}",
|
||||
description=f"EnviFormer transformer trained on {options['data_packages']} "
|
||||
f"evaluated on {options['eval_packages']}.",
|
||||
@ -100,8 +103,7 @@ class Command(BaseCommand):
|
||||
package=pack,
|
||||
rule_packages=rule_packages,
|
||||
data_packages=data_packages,
|
||||
eval_packages=eval_packages,
|
||||
threshold=options['threshold'],
|
||||
threshold=options["threshold"],
|
||||
name=f"ECC - {', '.join(options['data_packages'])} - T{options['threshold']:.2f}",
|
||||
description=f"ML Relative Reasoning trained on {options['data_packages']} with rules from "
|
||||
f"{options['rule_packages']} and evaluated on {options['eval_packages']}.",
|
||||
|
||||
@ -47,7 +47,7 @@ class Command(BaseCommand):
|
||||
"description": model.description,
|
||||
"kv": model.kv,
|
||||
"data_packages_uuids": [str(p.uuid) for p in model.data_packages.all()],
|
||||
"eval_packages_uuids": [str(p.uuid) for p in model.data_packages.all()],
|
||||
"eval_packages_uuids": [str(p.uuid) for p in model.eval_packages.all()],
|
||||
"threshold": model.threshold,
|
||||
"eval_results": model.eval_results,
|
||||
"multigen_eval": model.multigen_eval,
|
||||
|
||||
@ -8,7 +8,9 @@ from django.conf import settings as s
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
from epdb.models import EnviFormer, Package
|
||||
from epdb.models import EnviFormer
|
||||
|
||||
Package = s.GET_PACKAGE_MODEL()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
from django.apps import apps
|
||||
from django.conf import settings as s
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from django.db.models import F, Value, TextField, JSONField
|
||||
from django.db.models.functions import Replace, Cast
|
||||
from django.db.models import F, JSONField, TextField, Value
|
||||
from django.db.models.functions import Cast, Replace
|
||||
|
||||
from epdb.models import EnviPathModel
|
||||
|
||||
@ -23,10 +23,12 @@ class Command(BaseCommand):
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
Package = s.GET_PACKAGE_MODEL()
|
||||
Package.objects.update(url=Replace(F("url"), Value(options["old"]), Value(options["new"])))
|
||||
|
||||
MODELS = [
|
||||
"User",
|
||||
"Group",
|
||||
"Package",
|
||||
"Compound",
|
||||
"CompoundStructure",
|
||||
"Pathway",
|
||||
@ -47,7 +49,6 @@ class Command(BaseCommand):
|
||||
]
|
||||
for model in MODELS:
|
||||
obj_cls = apps.get_model("epdb", model)
|
||||
print(f"Localizing urls for {model}")
|
||||
obj_cls.objects.update(
|
||||
url=Replace(F("url"), Value(options["old"]), Value(options["new"]))
|
||||
)
|
||||
|
||||
@ -1,6 +1,5 @@
|
||||
# Generated by Django 5.2.1 on 2025-07-22 20:58
|
||||
# Generated by Django 5.2.7 on 2026-02-12 12:36
|
||||
|
||||
import datetime
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.auth.validators
|
||||
import django.contrib.postgres.fields
|
||||
@ -19,11 +18,12 @@ class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('auth', '0012_alter_user_first_name_max_length'),
|
||||
('contenttypes', '0002_remove_content_type_name'),
|
||||
migrations.swappable_dependency(settings.EPDB_PACKAGE_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Compound',
|
||||
name='ApplicabilityDomain',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
@ -31,9 +31,33 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('num_neighbours', models.IntegerField(default=5)),
|
||||
('reliability_threshold', models.FloatField(default=0.5)),
|
||||
('local_compatibilty_threshold', models.FloatField(default=0.5)),
|
||||
('functional_groups', models.JSONField(blank=True, default=dict, null=True)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Edge',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='EPModel',
|
||||
@ -44,7 +68,9 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Package')),
|
||||
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
|
||||
],
|
||||
options={
|
||||
@ -52,6 +78,27 @@ class Migration(migrations.Migration):
|
||||
'base_manager_name': 'objects',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ExternalDatabase',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
|
||||
('name', models.CharField(max_length=100, unique=True, verbose_name='Database Name')),
|
||||
('full_name', models.CharField(blank=True, max_length=255, verbose_name='Full Database Name')),
|
||||
('description', models.TextField(blank=True, verbose_name='Description')),
|
||||
('base_url', models.URLField(blank=True, null=True, verbose_name='Base URL')),
|
||||
('url_pattern', models.CharField(blank=True, help_text="URL pattern with {id} placeholder, e.g., 'https://pubchem.ncbi.nlm.nih.gov/compound/{id}'", max_length=500, verbose_name='URL Pattern')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is Active')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'External Database',
|
||||
'verbose_name_plural': 'External Databases',
|
||||
'db_table': 'epdb_external_database',
|
||||
'ordering': ['name'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Permission',
|
||||
fields=[
|
||||
@ -65,6 +112,7 @@ class Migration(migrations.Migration):
|
||||
name='License',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('cc_string', models.TextField(verbose_name='CC string')),
|
||||
('link', models.URLField(verbose_name='link')),
|
||||
('image_link', models.URLField(verbose_name='Image link')),
|
||||
],
|
||||
@ -78,8 +126,11 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Package')),
|
||||
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
@ -101,6 +152,9 @@ class Migration(migrations.Migration):
|
||||
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
|
||||
('email', models.EmailField(max_length=254, unique=True)),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('is_reviewer', models.BooleanField(default=False)),
|
||||
('default_package', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Default Package')),
|
||||
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.group', verbose_name='groups')),
|
||||
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.permission', verbose_name='user permissions')),
|
||||
],
|
||||
@ -117,11 +171,34 @@ class Migration(migrations.Migration):
|
||||
name='APIToken',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('hashed_key', models.CharField(max_length=128, unique=True)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('expires_at', models.DateTimeField(blank=True, default=datetime.datetime(2025, 10, 20, 20, 58, 48, 351675, tzinfo=datetime.timezone.utc), null=True)),
|
||||
('name', models.CharField(blank=True, help_text='Optional name for the token', max_length=100)),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('hashed_key', models.CharField(help_text='SHA-256 hash of the token key', max_length=128, unique=True)),
|
||||
('expires_at', models.DateTimeField(blank=True, help_text='Token expiration time (null for no expiration)', null=True)),
|
||||
('name', models.CharField(help_text='Descriptive name for this token', max_length=100)),
|
||||
('is_active', models.BooleanField(default=True, help_text='Whether this token is active')),
|
||||
('user', models.ForeignKey(help_text='User who owns this token', on_delete=django.db.models.deletion.CASCADE, related_name='api_tokens', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'API Token',
|
||||
'verbose_name_plural': 'API Tokens',
|
||||
'db_table': 'epdb_api_token',
|
||||
'ordering': ['-created'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Compound',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Package')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
@ -133,6 +210,7 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
('smiles', models.TextField(verbose_name='SMILES')),
|
||||
@ -150,36 +228,6 @@ class Migration(migrations.Migration):
|
||||
name='default_structure',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='compound_default_structure', to='epdb.compoundstructure', verbose_name='Default Structure'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Edge',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
'base_manager_name': 'objects',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='EnviFormer',
|
||||
fields=[
|
||||
('epmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='epdb.epmodel')),
|
||||
('threshold', models.FloatField(default=0.5)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
'base_manager_name': 'objects',
|
||||
},
|
||||
bases=('epdb.epmodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='PluginModel',
|
||||
fields=[
|
||||
@ -191,17 +239,6 @@ class Migration(migrations.Migration):
|
||||
},
|
||||
bases=('epdb.epmodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='RuleBaseRelativeReasoning',
|
||||
fields=[
|
||||
('epmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='epdb.epmodel')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
'base_manager_name': 'objects',
|
||||
},
|
||||
bases=('epdb.epmodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Group',
|
||||
fields=[
|
||||
@ -209,10 +246,11 @@ class Migration(migrations.Migration):
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('name', models.TextField(verbose_name='Group name')),
|
||||
('public', models.BooleanField(default=False, verbose_name='Public Group')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('group_member', models.ManyToManyField(related_name='groups_in_group', to='epdb.group', verbose_name='Group member')),
|
||||
('group_member', models.ManyToManyField(blank=True, related_name='groups_in_group', to='epdb.group', verbose_name='Group member')),
|
||||
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Group Owner')),
|
||||
('user_member', models.ManyToManyField(related_name='users_in_group', to=settings.AUTH_USER_MODEL, verbose_name='User members')),
|
||||
],
|
||||
@ -225,6 +263,41 @@ class Migration(migrations.Migration):
|
||||
name='default_group',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='default_group', to='epdb.group', verbose_name='Default Group'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='JobLog',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('task_id', models.UUIDField(unique=True)),
|
||||
('job_name', models.TextField()),
|
||||
('status', models.CharField(choices=[('INITIAL', 'Initial'), ('SUCCESS', 'Success'), ('FAILURE', 'Failure'), ('REVOKED', 'Revoked'), ('IGNORED', 'Ignored')], default='INITIAL', max_length=20)),
|
||||
('done_at', models.DateTimeField(blank=True, default=None, null=True)),
|
||||
('task_result', models.TextField(blank=True, default=None, null=True)),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Package',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('reviewed', models.BooleanField(default=False, verbose_name='Reviewstatus')),
|
||||
('license', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.license', verbose_name='License')),
|
||||
],
|
||||
options={
|
||||
'swappable': 'EPDB_PACKAGE_MODEL',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Node',
|
||||
fields=[
|
||||
@ -234,9 +307,11 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
('depth', models.IntegerField(verbose_name='Node depth')),
|
||||
('stereo_removed', models.BooleanField(default=False)),
|
||||
('default_node_label', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='default_node_structure', to='epdb.compoundstructure', verbose_name='Default Node Label')),
|
||||
('node_labels', models.ManyToManyField(related_name='node_structures', to='epdb.compoundstructure', verbose_name='All Node Labels')),
|
||||
('out_edges', models.ManyToManyField(to='epdb.edge', verbose_name='Outgoing Edges')),
|
||||
@ -255,38 +330,6 @@ class Migration(migrations.Migration):
|
||||
name='start_nodes',
|
||||
field=models.ManyToManyField(related_name='edge_educts', to='epdb.node', verbose_name='Start Nodes'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Package',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('reviewed', models.BooleanField(default=False, verbose_name='Reviewstatus')),
|
||||
('license', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.license', verbose_name='License')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='epmodel',
|
||||
name='package',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Package'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='compound',
|
||||
name='package',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Package'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='default_package',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.package', verbose_name='Default Package'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SequentialRule',
|
||||
fields=[
|
||||
@ -309,16 +352,6 @@ class Migration(migrations.Migration):
|
||||
},
|
||||
bases=('epdb.rule',),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='rule',
|
||||
name='package',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Package'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='rule',
|
||||
name='polymorphic_ctype',
|
||||
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Pathway',
|
||||
fields=[
|
||||
@ -328,9 +361,11 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Package')),
|
||||
('predicted', models.BooleanField(default=False)),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Package')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
@ -355,12 +390,13 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('aliases', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), default=list, size=None, verbose_name='Aliases')),
|
||||
('multi_step', models.BooleanField(verbose_name='Multistep Reaction')),
|
||||
('medline_references', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), null=True, size=None, verbose_name='Medline References')),
|
||||
('educts', models.ManyToManyField(related_name='reaction_educts', to='epdb.compoundstructure', verbose_name='Educts')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Package')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Package')),
|
||||
('products', models.ManyToManyField(related_name='reaction_products', to='epdb.compoundstructure', verbose_name='Products')),
|
||||
('rules', models.ManyToManyField(related_name='reaction_rule', to='epdb.rule', verbose_name='Rule')),
|
||||
],
|
||||
@ -368,6 +404,28 @@ class Migration(migrations.Migration):
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='EnzymeLink',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('ec_number', models.TextField(verbose_name='EC Number')),
|
||||
('classification_level', models.IntegerField(verbose_name='Classification Level')),
|
||||
('linking_method', models.TextField(verbose_name='Linking Method')),
|
||||
('edge_evidence', models.ManyToManyField(to='epdb.edge')),
|
||||
('rule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.rule')),
|
||||
('reaction_evidence', models.ManyToManyField(to='epdb.reaction')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='edge',
|
||||
name='edge_label',
|
||||
@ -382,11 +440,12 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('scenario_date', models.CharField(default='No date', max_length=256)),
|
||||
('scenario_type', models.CharField(default='Not specified', max_length=256)),
|
||||
('additional_information', models.JSONField(verbose_name='Additional Information')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Package')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Package')),
|
||||
('parent', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='epdb.scenario')),
|
||||
],
|
||||
options={
|
||||
@ -437,14 +496,16 @@ class Migration(migrations.Migration):
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('url', models.TextField(null=True, unique=True, verbose_name='URL')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('public', models.BooleanField(default=False)),
|
||||
('global_default', models.BooleanField(default=False)),
|
||||
('max_depth', models.IntegerField(default=5, verbose_name='Setting Max Depth')),
|
||||
('max_nodes', models.IntegerField(default=30, verbose_name='Setting Max Number of Nodes')),
|
||||
('model_threshold', models.FloatField(blank=True, default=0.25, null=True, verbose_name='Setting Model Threshold')),
|
||||
('expansion_scheme', models.CharField(choices=[('BFS', 'Breadth First Search'), ('DFS', 'Depth First Search'), ('GREEDY', 'Greedy')], default='BFS', max_length=20)),
|
||||
('model', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.epmodel', verbose_name='Setting EPModel')),
|
||||
('rule_packages', models.ManyToManyField(blank=True, related_name='setting_rule_packages', to='epdb.package', verbose_name='Setting Rule Packages')),
|
||||
('rule_packages', models.ManyToManyField(blank=True, related_name='setting_rule_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Setting Rule Packages')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
@ -461,39 +522,86 @@ class Migration(migrations.Migration):
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.setting', verbose_name='The users default settings'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='MLRelativeReasoning',
|
||||
name='EnviFormer',
|
||||
fields=[
|
||||
('epmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='epdb.epmodel')),
|
||||
('threshold', models.FloatField(default=0.5)),
|
||||
('model_status', models.CharField(choices=[('INITIAL', 'Initial'), ('INITIALIZING', 'Model is initializing.'), ('BUILDING', 'Model is building.'), ('BUILT_NOT_EVALUATED', 'Model is built and can be used for predictions, Model is not evaluated yet.'), ('EVALUATING', 'Model is evaluating'), ('FINISHED', 'Model has finished building and evaluation.'), ('ERROR', 'Model has failed.')], default='INITIAL')),
|
||||
('eval_results', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('data_packages', models.ManyToManyField(related_name='data_packages', to='epdb.package', verbose_name='Data Packages')),
|
||||
('eval_packages', models.ManyToManyField(related_name='eval_packages', to='epdb.package', verbose_name='Evaluation Packages')),
|
||||
('rule_packages', models.ManyToManyField(related_name='rule_packages', to='epdb.package', verbose_name='Rule Packages')),
|
||||
('multigen_eval', models.BooleanField(default=False)),
|
||||
('model_status', models.CharField(choices=[('INITIAL', 'Initial'), ('INITIALIZING', 'Model is initializing.'), ('BUILDING', 'Model is building.'), ('BUILT_NOT_EVALUATED', 'Model is built and can be used for predictions, Model is not evaluated yet.'), ('EVALUATING', 'Model is evaluating'), ('FINISHED', 'Model has finished building and evaluation.'), ('ERROR', 'Model has failed.')], default='INITIAL')),
|
||||
('app_domain', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.applicabilitydomain')),
|
||||
('data_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_data_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Data Packages')),
|
||||
('eval_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_eval_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Evaluation Packages')),
|
||||
('rule_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_rule_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Rule Packages')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
'base_manager_name': 'objects',
|
||||
},
|
||||
bases=('epdb.epmodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ApplicabilityDomain',
|
||||
name='MLRelativeReasoning',
|
||||
fields=[
|
||||
('epmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='epdb.epmodel')),
|
||||
('threshold', models.FloatField(default=0.5)),
|
||||
('eval_results', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('multigen_eval', models.BooleanField(default=False)),
|
||||
('model_status', models.CharField(choices=[('INITIAL', 'Initial'), ('INITIALIZING', 'Model is initializing.'), ('BUILDING', 'Model is building.'), ('BUILT_NOT_EVALUATED', 'Model is built and can be used for predictions, Model is not evaluated yet.'), ('EVALUATING', 'Model is evaluating'), ('FINISHED', 'Model has finished building and evaluation.'), ('ERROR', 'Model has failed.')], default='INITIAL')),
|
||||
('app_domain', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.applicabilitydomain')),
|
||||
('data_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_data_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Data Packages')),
|
||||
('eval_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_eval_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Evaluation Packages')),
|
||||
('rule_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_rule_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Rule Packages')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('epdb.epmodel',),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='applicabilitydomain',
|
||||
name='model',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.mlrelativereasoning'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='RuleBasedRelativeReasoning',
|
||||
fields=[
|
||||
('epmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='epdb.epmodel')),
|
||||
('threshold', models.FloatField(default=0.5)),
|
||||
('eval_results', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('multigen_eval', models.BooleanField(default=False)),
|
||||
('model_status', models.CharField(choices=[('INITIAL', 'Initial'), ('INITIALIZING', 'Model is initializing.'), ('BUILDING', 'Model is building.'), ('BUILT_NOT_EVALUATED', 'Model is built and can be used for predictions, Model is not evaluated yet.'), ('EVALUATING', 'Model is evaluating'), ('FINISHED', 'Model has finished building and evaluation.'), ('ERROR', 'Model has failed.')], default='INITIAL')),
|
||||
('min_count', models.IntegerField(default=10)),
|
||||
('max_count', models.IntegerField(default=0)),
|
||||
('app_domain', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.applicabilitydomain')),
|
||||
('data_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_data_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Data Packages')),
|
||||
('eval_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_eval_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Evaluation Packages')),
|
||||
('rule_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_rule_packages', to=settings.EPDB_PACKAGE_MODEL, verbose_name='Rule Packages')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('epdb.epmodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ExternalIdentifier',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, unique=True, verbose_name='UUID of this object')),
|
||||
('name', models.TextField(default='no name', verbose_name='Name')),
|
||||
('description', models.TextField(default='no description', verbose_name='Descriptions')),
|
||||
('kv', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('num_neighbours', models.FloatField(default=5)),
|
||||
('reliability_threshold', models.FloatField(default=0.5)),
|
||||
('local_compatibilty_threshold', models.FloatField(default=0.5)),
|
||||
('model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.mlrelativereasoning')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
|
||||
('object_id', models.IntegerField()),
|
||||
('identifier_value', models.CharField(max_length=255, verbose_name='Identifier Value')),
|
||||
('url', models.URLField(blank=True, null=True, verbose_name='Direct URL')),
|
||||
('is_primary', models.BooleanField(default=False, help_text='Mark this as the primary identifier for this database', verbose_name='Is Primary')),
|
||||
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
|
||||
('database', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.externaldatabase', verbose_name='External Database')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
'verbose_name': 'External Identifier',
|
||||
'verbose_name_plural': 'External Identifiers',
|
||||
'db_table': 'epdb_external_identifier',
|
||||
'indexes': [models.Index(fields=['content_type', 'object_id'], name='epdb_extern_content_b76813_idx'), models.Index(fields=['database', 'identifier_value'], name='epdb_extern_databas_486422_idx')],
|
||||
'unique_together': {('content_type', 'object_id', 'database', 'identifier_value')},
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
@ -558,7 +666,7 @@ class Migration(migrations.Migration):
|
||||
('permission_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, to='epdb.permission')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False, verbose_name='UUID of this object')),
|
||||
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.group', verbose_name='Permission to')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Permission on')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Permission on')),
|
||||
],
|
||||
options={
|
||||
'unique_together': {('package', 'group')},
|
||||
@ -570,7 +678,7 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('permission_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, to='epdb.permission')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False, verbose_name='UUID of this object')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.package', verbose_name='Permission on')),
|
||||
('package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.EPDB_PACKAGE_MODEL, verbose_name='Permission on')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Permission to')),
|
||||
],
|
||||
options={
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
59
epdb/migrations/0002_auto_20260212_1240.py
Normal file
59
epdb/migrations/0002_auto_20260212_1240.py
Normal file
@ -0,0 +1,59 @@
|
||||
# Generated by Django 5.2.7 on 2025-11-11 14:13
|
||||
|
||||
import re
|
||||
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.db import migrations
|
||||
from django.db.models import Min
|
||||
|
||||
|
||||
def set_cc(apps, schema_editor):
|
||||
License = apps.get_model("epdb", "License")
|
||||
|
||||
# For all existing licenses extract cc_string from link
|
||||
for license in License.objects.all():
|
||||
pattern = r"/licenses/([^/]+)/4\.0"
|
||||
match = re.search(pattern, license.link)
|
||||
if match:
|
||||
license.cc_string = match.group(1)
|
||||
license.save()
|
||||
else:
|
||||
raise ValueError(f"Could not find license for {license.link}")
|
||||
|
||||
# Ensure we have all licenses
|
||||
cc_strings = ["by", "by-nc", "by-nc-nd", "by-nc-sa", "by-nd", "by-sa"]
|
||||
for cc_string in cc_strings:
|
||||
if not License.objects.filter(cc_string=cc_string).exists():
|
||||
new_license = License()
|
||||
new_license.cc_string = cc_string
|
||||
new_license.link = f"https://creativecommons.org/licenses/{cc_string}/4.0/"
|
||||
new_license.image_link = f"https://licensebuttons.net/l/{cc_string}/4.0/88x31.png"
|
||||
new_license.save()
|
||||
|
||||
# As we might have existing Licenses representing the same License,
|
||||
# get min pk and all pks as a list
|
||||
license_lookup_qs = License.objects.values("cc_string").annotate(
|
||||
lowest_pk=Min("id"), all_pks=ArrayAgg("id", order_by=("id",))
|
||||
)
|
||||
|
||||
license_lookup = {
|
||||
row["cc_string"]: (row["lowest_pk"], row["all_pks"]) for row in license_lookup_qs
|
||||
}
|
||||
|
||||
Packages = apps.get_model("bayer", "Package")
|
||||
|
||||
for k, v in license_lookup.items():
|
||||
# Set min pk to all packages pointing to any of the duplicates
|
||||
Packages.objects.filter(pk__in=v[1]).update(license_id=v[0])
|
||||
# remove the min pk from "other" pks as we use them for deletion
|
||||
v[1].remove(v[0])
|
||||
# Delete redundant License objects
|
||||
License.objects.filter(pk__in=v[1]).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("epdb", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [migrations.RunPython(set_cc)]
|
||||
@ -1,128 +0,0 @@
|
||||
# Generated by Django 5.2.1 on 2025-08-25 18:07
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('contenttypes', '0002_remove_content_type_name'),
|
||||
('epdb', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ExternalDatabase',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
|
||||
('name', models.CharField(max_length=100, unique=True, verbose_name='Database Name')),
|
||||
('full_name', models.CharField(blank=True, max_length=255, verbose_name='Full Database Name')),
|
||||
('description', models.TextField(blank=True, verbose_name='Description')),
|
||||
('base_url', models.URLField(blank=True, null=True, verbose_name='Base URL')),
|
||||
('url_pattern', models.CharField(blank=True, help_text="URL pattern with {id} placeholder, e.g., 'https://pubchem.ncbi.nlm.nih.gov/compound/{id}'", max_length=500, verbose_name='URL Pattern')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is Active')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'External Database',
|
||||
'verbose_name_plural': 'External Databases',
|
||||
'db_table': 'epdb_external_database',
|
||||
'ordering': ['name'],
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='apitoken',
|
||||
options={'ordering': ['-created'], 'verbose_name': 'API Token', 'verbose_name_plural': 'API Tokens'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='edge',
|
||||
options={},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='edge',
|
||||
name='polymorphic_ctype',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='apitoken',
|
||||
name='is_active',
|
||||
field=models.BooleanField(default=True, help_text='Whether this token is active'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='apitoken',
|
||||
name='modified',
|
||||
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='applicabilitydomain',
|
||||
name='functional_groups',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='mlrelativereasoning',
|
||||
name='app_domain',
|
||||
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.applicabilitydomain'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='apitoken',
|
||||
name='created',
|
||||
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='apitoken',
|
||||
name='expires_at',
|
||||
field=models.DateTimeField(blank=True, help_text='Token expiration time (null for no expiration)', null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='apitoken',
|
||||
name='hashed_key',
|
||||
field=models.CharField(help_text='SHA-256 hash of the token key', max_length=128, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='apitoken',
|
||||
name='name',
|
||||
field=models.CharField(help_text='Descriptive name for this token', max_length=100),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='apitoken',
|
||||
name='user',
|
||||
field=models.ForeignKey(help_text='User who owns this token', on_delete=django.db.models.deletion.CASCADE, related_name='api_tokens', to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='applicabilitydomain',
|
||||
name='num_neighbours',
|
||||
field=models.IntegerField(default=5),
|
||||
),
|
||||
migrations.AlterModelTable(
|
||||
name='apitoken',
|
||||
table='epdb_api_token',
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ExternalIdentifier',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
|
||||
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
|
||||
('object_id', models.IntegerField()),
|
||||
('identifier_value', models.CharField(max_length=255, verbose_name='Identifier Value')),
|
||||
('url', models.URLField(blank=True, null=True, verbose_name='Direct URL')),
|
||||
('is_primary', models.BooleanField(default=False, help_text='Mark this as the primary identifier for this database', verbose_name='Is Primary')),
|
||||
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
|
||||
('database', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='epdb.externaldatabase', verbose_name='External Database')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'External Identifier',
|
||||
'verbose_name_plural': 'External Identifiers',
|
||||
'db_table': 'epdb_external_identifier',
|
||||
'indexes': [models.Index(fields=['content_type', 'object_id'], name='epdb_extern_content_b76813_idx'), models.Index(fields=['database', 'identifier_value'], name='epdb_extern_databas_486422_idx')],
|
||||
'unique_together': {('content_type', 'object_id', 'database', 'identifier_value')},
|
||||
},
|
||||
),
|
||||
]
|
||||
@ -1,228 +0,0 @@
|
||||
# Generated by Django 5.2.1 on 2025-08-26 17:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def populate_url(apps, schema_editor):
|
||||
MODELS = [
|
||||
'User',
|
||||
'Group',
|
||||
'Package',
|
||||
'Compound',
|
||||
'CompoundStructure',
|
||||
'Pathway',
|
||||
'Edge',
|
||||
'Node',
|
||||
'Reaction',
|
||||
'SimpleAmbitRule',
|
||||
'SimpleRDKitRule',
|
||||
'ParallelRule',
|
||||
'SequentialRule',
|
||||
'Scenario',
|
||||
'Setting',
|
||||
'MLRelativeReasoning',
|
||||
'EnviFormer',
|
||||
'ApplicabilityDomain',
|
||||
]
|
||||
for model in MODELS:
|
||||
obj_cls = apps.get_model("epdb", model)
|
||||
for obj in obj_cls.objects.all():
|
||||
obj.url = assemble_url(obj)
|
||||
if obj.url is None:
|
||||
raise ValueError(f"Could not assemble url for {obj}")
|
||||
obj.save()
|
||||
|
||||
|
||||
def assemble_url(obj):
|
||||
from django.conf import settings as s
|
||||
match obj.__class__.__name__:
|
||||
case 'User':
|
||||
return '{}/user/{}'.format(s.SERVER_URL, obj.uuid)
|
||||
case 'Group':
|
||||
return '{}/group/{}'.format(s.SERVER_URL, obj.uuid)
|
||||
case 'Package':
|
||||
return '{}/package/{}'.format(s.SERVER_URL, obj.uuid)
|
||||
case 'Compound':
|
||||
return '{}/compound/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'CompoundStructure':
|
||||
return '{}/structure/{}'.format(obj.compound.url, obj.uuid)
|
||||
case 'SimpleAmbitRule':
|
||||
return '{}/simple-ambit-rule/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'SimpleRDKitRule':
|
||||
return '{}/simple-rdkit-rule/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'ParallelRule':
|
||||
return '{}/parallel-rule/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'SequentialRule':
|
||||
return '{}/sequential-rule/{}'.format(obj.compound.url, obj.uuid)
|
||||
case 'Reaction':
|
||||
return '{}/reaction/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'Pathway':
|
||||
return '{}/pathway/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'Node':
|
||||
return '{}/node/{}'.format(obj.pathway.url, obj.uuid)
|
||||
case 'Edge':
|
||||
return '{}/edge/{}'.format(obj.pathway.url, obj.uuid)
|
||||
case 'MLRelativeReasoning':
|
||||
return '{}/model/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'EnviFormer':
|
||||
return '{}/model/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'ApplicabilityDomain':
|
||||
return '{}/model/{}/applicability-domain/{}'.format(obj.model.package.url, obj.model.uuid, obj.uuid)
|
||||
case 'Scenario':
|
||||
return '{}/scenario/{}'.format(obj.package.url, obj.uuid)
|
||||
case 'Setting':
|
||||
return '{}/setting/{}'.format(s.SERVER_URL, obj.uuid)
|
||||
case _:
|
||||
raise ValueError(f"Unknown model {obj.__class__.__name__}")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('epdb', '0002_externaldatabase_alter_apitoken_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='applicabilitydomain',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='compound',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='compoundstructure',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='edge',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='epmodel',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='group',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='node',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='package',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='pathway',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='reaction',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='rule',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='scenario',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='setting',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=False, verbose_name='URL'),
|
||||
),
|
||||
|
||||
migrations.RunPython(populate_url, reverse_code=migrations.RunPython.noop),
|
||||
|
||||
migrations.AlterField(
|
||||
model_name='applicabilitydomain',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='compound',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='compoundstructure',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='edge',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='epmodel',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='group',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='node',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='package',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pathway',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='reaction',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='rule',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='scenario',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='setting',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='url',
|
||||
field=models.TextField(null=True, unique=True, verbose_name='URL'),
|
||||
),
|
||||
]
|
||||
@ -1,55 +0,0 @@
|
||||
# Generated by Django 5.2.1 on 2025-09-09 09:21
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epdb', '0001_squashed_0003_applicabilitydomain_url_compound_url_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='mlrelativereasoning',
|
||||
options={},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='mlrelativereasoning',
|
||||
name='data_packages',
|
||||
field=models.ManyToManyField(related_name='%(app_label)s_%(class)s_data_packages', to='epdb.package', verbose_name='Data Packages'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='mlrelativereasoning',
|
||||
name='eval_packages',
|
||||
field=models.ManyToManyField(related_name='%(app_label)s_%(class)s_eval_packages', to='epdb.package', verbose_name='Evaluation Packages'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='mlrelativereasoning',
|
||||
name='rule_packages',
|
||||
field=models.ManyToManyField(related_name='%(app_label)s_%(class)s_rule_packages', to='epdb.package', verbose_name='Rule Packages'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='RuleBasedRelativeReasoning',
|
||||
fields=[
|
||||
('epmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='epdb.epmodel')),
|
||||
('threshold', models.FloatField(default=0.5)),
|
||||
('eval_results', models.JSONField(blank=True, default=dict, null=True)),
|
||||
('model_status', models.CharField(choices=[('INITIAL', 'Initial'), ('INITIALIZING', 'Model is initializing.'), ('BUILDING', 'Model is building.'), ('BUILT_NOT_EVALUATED', 'Model is built and can be used for predictions, Model is not evaluated yet.'), ('EVALUATING', 'Model is evaluating'), ('FINISHED', 'Model has finished building and evaluation.'), ('ERROR', 'Model has failed.')], default='INITIAL')),
|
||||
('min_count', models.IntegerField(default=10)),
|
||||
('max_count', models.IntegerField(default=0)),
|
||||
('app_domain', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.applicabilitydomain')),
|
||||
('data_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_data_packages', to='epdb.package', verbose_name='Data Packages')),
|
||||
('eval_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_eval_packages', to='epdb.package', verbose_name='Evaluation Packages')),
|
||||
('rule_packages', models.ManyToManyField(related_name='%(app_label)s_%(class)s_rule_packages', to='epdb.package', verbose_name='Rule Packages')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('epdb.epmodel',),
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='RuleBaseRelativeReasoning',
|
||||
),
|
||||
]
|
||||
@ -1,18 +0,0 @@
|
||||
# Generated by Django 5.2.1 on 2025-09-11 06:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epdb', '0004_alter_mlrelativereasoning_options_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='group',
|
||||
name='group_member',
|
||||
field=models.ManyToManyField(blank=True, related_name='groups_in_group', to='epdb.group', verbose_name='Group member'),
|
||||
),
|
||||
]
|
||||
@ -1,23 +0,0 @@
|
||||
# Generated by Django 5.2.1 on 2025-09-18 06:42
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epdb', '0005_alter_group_group_member'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='mlrelativereasoning',
|
||||
name='multigen_eval',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='rulebasedrelativereasoning',
|
||||
name='multigen_eval',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@ -1,53 +0,0 @@
|
||||
# Generated by Django 5.2.1 on 2025-10-07 08:19
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('epdb', '0006_mlrelativereasoning_multigen_eval_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='enviformer',
|
||||
options={},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='enviformer',
|
||||
name='app_domain',
|
||||
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='epdb.applicabilitydomain'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='enviformer',
|
||||
name='data_packages',
|
||||
field=models.ManyToManyField(related_name='%(app_label)s_%(class)s_data_packages', to='epdb.package', verbose_name='Data Packages'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='enviformer',
|
||||
name='eval_packages',
|
||||
field=models.ManyToManyField(related_name='%(app_label)s_%(class)s_eval_packages', to='epdb.package', verbose_name='Evaluation Packages'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='enviformer',
|
||||
name='eval_results',
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='enviformer',
|
||||
name='model_status',
|
||||
field=models.CharField(choices=[('INITIAL', 'Initial'), ('INITIALIZING', 'Model is initializing.'), ('BUILDING', 'Model is building.'), ('BUILT_NOT_EVALUATED', 'Model is built and can be used for predictions, Model is not evaluated yet.'), ('EVALUATING', 'Model is evaluating'), ('FINISHED', 'Model has finished building and evaluation.'), ('ERROR', 'Model has failed.')], default='INITIAL'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='enviformer',
|
||||
name='multigen_eval',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='enviformer',
|
||||
name='rule_packages',
|
||||
field=models.ManyToManyField(related_name='%(app_label)s_%(class)s_rule_packages', to='epdb.package', verbose_name='Rule Packages'),
|
||||
),
|
||||
]
|
||||
@ -1,64 +0,0 @@
|
||||
# Generated by Django 5.2.7 on 2025-10-10 06:58
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("epdb", "0007_alter_enviformer_options_enviformer_app_domain_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="EnzymeLink",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now, editable=False, verbose_name="created"
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now, editable=False, verbose_name="modified"
|
||||
),
|
||||
),
|
||||
(
|
||||
"uuid",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, unique=True, verbose_name="UUID of this object"
|
||||
),
|
||||
),
|
||||
("name", models.TextField(default="no name", verbose_name="Name")),
|
||||
(
|
||||
"description",
|
||||
models.TextField(default="no description", verbose_name="Descriptions"),
|
||||
),
|
||||
("url", models.TextField(null=True, unique=True, verbose_name="URL")),
|
||||
("kv", models.JSONField(blank=True, default=dict, null=True)),
|
||||
("ec_number", models.TextField(verbose_name="EC Number")),
|
||||
("classification_level", models.IntegerField(verbose_name="Classification Level")),
|
||||
("linking_method", models.TextField(verbose_name="Linking Method")),
|
||||
("edge_evidence", models.ManyToManyField(to="epdb.edge")),
|
||||
("reaction_evidence", models.ManyToManyField(to="epdb.reaction")),
|
||||
(
|
||||
"rule",
|
||||
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="epdb.rule"),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
]
|
||||
@ -1,66 +0,0 @@
|
||||
# Generated by Django 5.2.7 on 2025-10-27 09:39
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import model_utils.fields
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("epdb", "0008_enzymelink"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="JobLog",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now, editable=False, verbose_name="created"
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now, editable=False, verbose_name="modified"
|
||||
),
|
||||
),
|
||||
("task_id", models.UUIDField(unique=True)),
|
||||
("job_name", models.TextField()),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("INITIAL", "Initial"),
|
||||
("SUCCESS", "Success"),
|
||||
("FAILURE", "Failure"),
|
||||
("REVOKED", "Revoked"),
|
||||
("IGNORED", "Ignored"),
|
||||
],
|
||||
default="INITIAL",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("done_at", models.DateTimeField(blank=True, default=None, null=True)),
|
||||
("task_result", models.TextField(blank=True, default=None, null=True)),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
]
|
||||
934
epdb/models.py
934
epdb/models.py
File diff suppressed because it is too large
Load Diff
199
epdb/tasks.py
199
epdb/tasks.py
@ -1,19 +1,24 @@
|
||||
import csv
|
||||
import io
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Callable, List, Optional
|
||||
from uuid import uuid4
|
||||
|
||||
from celery import shared_task
|
||||
from celery.utils.functional import LRUCache
|
||||
from django.conf import settings as s
|
||||
from django.core.mail import EmailMultiAlternatives
|
||||
from django.utils import timezone
|
||||
|
||||
from epdb.logic import SPathway
|
||||
from epdb.models import EPModel, JobLog, Node, Package, Pathway, Rule, Setting, User, Edge
|
||||
from epdb.models import Edge, EPModel, JobLog, Node, Pathway, Rule, Setting, User
|
||||
from utilities.chem import FormatConverter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
ML_CACHE = LRUCache(3) # Cache the three most recent ML models to reduce load times.
|
||||
|
||||
Package = s.GET_PACKAGE_MODEL()
|
||||
|
||||
|
||||
def get_ml_model(model_pk: int):
|
||||
if model_pk not in ML_CACHE:
|
||||
@ -29,11 +34,11 @@ def dispatch_eager(user: "User", job: Callable, *args, **kwargs):
|
||||
log.task_id = uuid4()
|
||||
log.job_name = job.__name__
|
||||
log.status = "SUCCESS"
|
||||
log.done_at = datetime.now()
|
||||
log.done_at = timezone.now()
|
||||
log.task_result = str(x) if x else None
|
||||
log.save()
|
||||
|
||||
return x
|
||||
return log, x
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
raise e
|
||||
@ -49,7 +54,7 @@ def dispatch(user: "User", job: Callable, *args, **kwargs):
|
||||
log.status = "INITIAL"
|
||||
log.save()
|
||||
|
||||
return x.result
|
||||
return log
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
raise e
|
||||
@ -69,7 +74,31 @@ def predict_simple(model_pk: int, smiles: str):
|
||||
|
||||
@shared_task(queue="background")
|
||||
def send_registration_mail(user_pk: int):
|
||||
pass
|
||||
u = User.objects.get(id=user_pk)
|
||||
|
||||
tpl = """Welcome {username}!,
|
||||
|
||||
Thank you for your interest in enviPath.
|
||||
|
||||
The public system is intended for non-commercial use only.
|
||||
We will review your account details and usually activate your account within 24 hours.
|
||||
Once activated, you will be notified by email.
|
||||
|
||||
If we have any questions, we will contact you at this email address.
|
||||
|
||||
Best regards,
|
||||
|
||||
enviPath team"""
|
||||
|
||||
msg = EmailMultiAlternatives(
|
||||
"Your enviPath account",
|
||||
tpl.format(username=u.username),
|
||||
"admin@envipath.org",
|
||||
[u.email],
|
||||
bcc=["admin@envipath.org"],
|
||||
)
|
||||
|
||||
msg.send(fail_silently=False)
|
||||
|
||||
|
||||
@shared_task(bind=True, queue="model")
|
||||
@ -136,14 +165,25 @@ def predict(
|
||||
pred_setting_pk: int,
|
||||
limit: Optional[int] = None,
|
||||
node_pk: Optional[int] = None,
|
||||
setting_overrides: Optional[dict] = None,
|
||||
) -> Pathway:
|
||||
pw = Pathway.objects.get(id=pw_pk)
|
||||
setting = Setting.objects.get(id=pred_setting_pk)
|
||||
|
||||
if setting_overrides:
|
||||
for k, v in setting_overrides.items():
|
||||
setattr(setting, k, v)
|
||||
|
||||
# If the setting has a model add/restore it from the cache
|
||||
if setting.model is not None:
|
||||
setting.model = get_ml_model(setting.model.pk)
|
||||
|
||||
pw.kv.update(**{"status": "running"})
|
||||
kv = {"status": "running"}
|
||||
|
||||
if setting_overrides:
|
||||
kv["setting_overrides"] = setting_overrides
|
||||
|
||||
pw.kv.update(**kv)
|
||||
pw.save()
|
||||
|
||||
if JobLog.objects.filter(task_id=self.request.id).exists():
|
||||
@ -168,10 +208,12 @@ def predict(
|
||||
spw = SPathway.from_pathway(pw)
|
||||
spw.predict_step(from_node=n)
|
||||
else:
|
||||
raise ValueError("Neither limit nor node_pk given!")
|
||||
spw = SPathway(prediction_setting=setting, persist=pw)
|
||||
spw.predict()
|
||||
|
||||
except Exception as e:
|
||||
pw.kv.update({"status": "failed"})
|
||||
pw.kv.update(**{"error": str(e)})
|
||||
pw.save()
|
||||
|
||||
if JobLog.objects.filter(task_id=self.request.id).exists():
|
||||
@ -281,3 +323,144 @@ def identify_missing_rules(
|
||||
buffer.seek(0)
|
||||
|
||||
return buffer.getvalue()
|
||||
|
||||
|
||||
@shared_task(bind=True, queue="background")
|
||||
def engineer_pathways(self, pw_pks: List[int], setting_pk: int, target_package_pk: int):
|
||||
from utilities.misc import PathwayUtils
|
||||
|
||||
setting = Setting.objects.get(pk=setting_pk)
|
||||
# Temporarily set model_threshold to 0.0 to keep all tps
|
||||
setting.model_threshold = 0.0
|
||||
|
||||
target = Package.objects.get(pk=target_package_pk)
|
||||
|
||||
intermediate_pathways = []
|
||||
predicted_pathways = []
|
||||
|
||||
for pw in Pathway.objects.filter(pk__in=pw_pks):
|
||||
pu = PathwayUtils(pw)
|
||||
|
||||
eng_pw, node_to_snode_mapping, intermediates = pu.engineer(setting)
|
||||
|
||||
# If we've found intermediates, do the following
|
||||
# - Get a copy of the original pathway and add intermediates
|
||||
# - Store the predicted pathway for further investigation
|
||||
if len(intermediates):
|
||||
copy_mapping = {}
|
||||
copied_pw = pw.copy(target, copy_mapping)
|
||||
copied_pw.name = f"{copied_pw.name} (Engineered)"
|
||||
copied_pw.description = f"The original Pathway can be found here: {pw.url}"
|
||||
copied_pw.save()
|
||||
|
||||
for inter in intermediates:
|
||||
start = copy_mapping[inter[0]]
|
||||
end = copy_mapping[inter[1]]
|
||||
start_snode = inter[2]
|
||||
end_snode = inter[3]
|
||||
for idx, intermediate_edge in enumerate(inter[4]):
|
||||
smiles_to_node = {}
|
||||
|
||||
snodes_to_create = list(
|
||||
set(intermediate_edge.educts + intermediate_edge.products)
|
||||
)
|
||||
|
||||
for snode in snodes_to_create:
|
||||
if snode == start_snode or snode == end_snode:
|
||||
smiles_to_node[snode.smiles] = start if snode == start_snode else end
|
||||
continue
|
||||
|
||||
if snode.smiles not in smiles_to_node:
|
||||
n = Node.create(copied_pw, smiles=snode.smiles, depth=snode.depth)
|
||||
# Used in viz to highlight intermediates
|
||||
n.kv.update({"is_engineered_intermediate": True})
|
||||
n.save()
|
||||
smiles_to_node[snode.smiles] = n
|
||||
|
||||
Edge.create(
|
||||
copied_pw,
|
||||
[smiles_to_node[educt.smiles] for educt in intermediate_edge.educts],
|
||||
[smiles_to_node[product.smiles] for product in intermediate_edge.products],
|
||||
rule=intermediate_edge.rule,
|
||||
)
|
||||
|
||||
# Persist the predicted pathway
|
||||
pred_pw = pu.spathway_to_pathway(target, eng_pw, name=f"{pw.name} (Predicted)")
|
||||
|
||||
intermediate_pathways.append(copied_pw.url)
|
||||
predicted_pathways.append(pred_pw.url)
|
||||
|
||||
return intermediate_pathways, predicted_pathways
|
||||
|
||||
|
||||
@shared_task(bind=True, queue="background")
|
||||
def batch_predict(
|
||||
self,
|
||||
substrates: List[str] | List[List[str]],
|
||||
prediction_setting_pk: int,
|
||||
target_package_pk: int,
|
||||
num_tps: int = 50,
|
||||
):
|
||||
target_package = Package.objects.get(pk=target_package_pk)
|
||||
prediction_setting = Setting.objects.get(pk=prediction_setting_pk)
|
||||
|
||||
if len(substrates) == 0:
|
||||
raise ValueError("No substrates given!")
|
||||
|
||||
is_pair = isinstance(substrates[0], list)
|
||||
|
||||
substrate_and_names = []
|
||||
if not is_pair:
|
||||
for sub in substrates:
|
||||
substrate_and_names.append([sub, None])
|
||||
else:
|
||||
substrate_and_names = substrates
|
||||
|
||||
# Check prerequisite that we can standardize all substrates
|
||||
standardized_substrates_and_smiles = []
|
||||
for substrate in substrate_and_names:
|
||||
try:
|
||||
stand_smiles = FormatConverter.standardize(substrate[0])
|
||||
standardized_substrates_and_smiles.append([stand_smiles, substrate[1]])
|
||||
except ValueError:
|
||||
raise ValueError(
|
||||
f'Pathway prediction failed as standardization of SMILES "{substrate}" failed!'
|
||||
)
|
||||
|
||||
pathways = []
|
||||
|
||||
for pair in standardized_substrates_and_smiles:
|
||||
pw = Pathway.create(
|
||||
target_package,
|
||||
pair[0],
|
||||
name=pair[1],
|
||||
predicted=True,
|
||||
)
|
||||
|
||||
# set mode and setting
|
||||
pw.setting = prediction_setting
|
||||
pw.kv.update({"mode": "predict"})
|
||||
pw.save()
|
||||
|
||||
predict(
|
||||
pw.pk,
|
||||
prediction_setting.pk,
|
||||
limit=None,
|
||||
setting_overrides={
|
||||
"max_nodes": num_tps,
|
||||
"max_depth": num_tps,
|
||||
"model_threshold": 0.001,
|
||||
},
|
||||
)
|
||||
|
||||
pathways.append(pw)
|
||||
|
||||
buffer = io.StringIO()
|
||||
|
||||
for idx, pw in enumerate(pathways):
|
||||
# Carry out header only for the first pathway
|
||||
buffer.write(pw.to_csv(include_header=idx == 0, include_pathway_url=True))
|
||||
|
||||
buffer.seek(0)
|
||||
|
||||
return buffer.getvalue()
|
||||
|
||||
18
epdb/urls.py
18
epdb/urls.py
@ -48,6 +48,8 @@ urlpatterns = [
|
||||
re_path(r"^user$", v.users, name="users"),
|
||||
re_path(r"^group$", v.groups, name="groups"),
|
||||
re_path(r"^search$", v.search, name="search"),
|
||||
re_path(r"^predict$", v.predict_pathway, name="predict_pathway"),
|
||||
re_path(r"^batch-predict$", v.batch_predict_pathway, name="batch_predict_pathway"),
|
||||
# User Detail
|
||||
re_path(rf"^user/(?P<user_uuid>{UUID})", v.user, name="user"),
|
||||
# Group Detail
|
||||
@ -141,6 +143,11 @@ urlpatterns = [
|
||||
v.package_pathway,
|
||||
name="package pathway detail",
|
||||
),
|
||||
re_path(
|
||||
rf"^package/(?P<package_uuid>{UUID})/predict$",
|
||||
v.package_predict_pathway,
|
||||
name="package predict pathway",
|
||||
),
|
||||
# Pathway Nodes
|
||||
re_path(
|
||||
rf"^package/(?P<package_uuid>{UUID})/pathway/(?P<pathway_uuid>{UUID})/node$",
|
||||
@ -190,7 +197,16 @@ urlpatterns = [
|
||||
re_path(r"^indigo/dearomatize$", v.dearomatize, name="indigo_dearomatize"),
|
||||
re_path(r"^indigo/layout$", v.layout, name="indigo_layout"),
|
||||
re_path(r"^depict$", v.depict, name="depict"),
|
||||
re_path(r"^jobs", v.jobs, name="jobs"),
|
||||
path("jobs", v.jobs, name="jobs"),
|
||||
path("jobs/<uuid:job_uuid>", v.job, name="job detail"),
|
||||
# OAuth Stuff
|
||||
path("o/userinfo/", v.userinfo, name="oauth_userinfo"),
|
||||
# Static Pages
|
||||
re_path(r"^terms$", v.static_terms_of_use, name="terms_of_use"),
|
||||
re_path(r"^privacy$", v.static_privacy_policy, name="privacy_policy"),
|
||||
re_path(r"^cookie-policy$", v.static_cookie_policy, name="cookie_policy"),
|
||||
re_path(r"^about$", v.static_about_us, name="about_us"),
|
||||
re_path(r"^contact$", v.static_contact_support, name="contact_support"),
|
||||
re_path(r"^careers$", v.static_careers, name="careers"),
|
||||
re_path(r"^cite$", v.static_cite, name="cite"),
|
||||
]
|
||||
|
||||
1165
epdb/views.py
1165
epdb/views.py
File diff suppressed because it is too large
Load Diff
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,24 +1,21 @@
|
||||
import gzip
|
||||
import json
|
||||
import logging
|
||||
import os.path
|
||||
from datetime import datetime
|
||||
|
||||
from django.conf import settings as s
|
||||
from django.http import HttpResponseNotAllowed
|
||||
from django.shortcuts import render
|
||||
|
||||
from epdb.logic import PackageManager
|
||||
from epdb.models import Rule, SimpleAmbitRule, Package, CompoundStructure
|
||||
from epdb.views import get_base_context, _anonymous_or_real
|
||||
from utilities.chem import FormatConverter
|
||||
|
||||
|
||||
from rdkit import Chem
|
||||
from rdkit.Chem.MolStandardize import rdMolStandardize
|
||||
|
||||
from epdb.models import CompoundStructure, Rule, SimpleAmbitRule
|
||||
from epdb.views import get_base_context
|
||||
from utilities.chem import FormatConverter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
Package = s.GET_PACKAGE_MODEL()
|
||||
|
||||
|
||||
def normalize_smiles(smiles):
|
||||
m1 = Chem.MolFromSmiles(smiles)
|
||||
@ -59,9 +56,7 @@ def run_both_engines(SMILES, SMIRKS):
|
||||
set(
|
||||
[
|
||||
normalize_smiles(str(x))
|
||||
for x in FormatConverter.sanitize_smiles(
|
||||
[str(s) for s in all_rdkit_prods]
|
||||
)[0]
|
||||
for x in FormatConverter.sanitize_smiles([str(s) for s in all_rdkit_prods])[0]
|
||||
]
|
||||
)
|
||||
)
|
||||
@ -81,12 +76,9 @@ def migration(request):
|
||||
open(s.BASE_DIR / "fixtures" / "migration_status_per_rule.json")
|
||||
)
|
||||
else:
|
||||
BBD = Package.objects.get(
|
||||
url="http://localhost:8000/package/32de3cf4-e3e6-4168-956e-32fa5ddb0ce1"
|
||||
)
|
||||
BBD = Package.objects.get(uuid="32de3cf4-e3e6-4168-956e-32fa5ddb0ce1")
|
||||
ALL_SMILES = [
|
||||
cs.smiles
|
||||
for cs in CompoundStructure.objects.filter(compound__package=BBD)
|
||||
cs.smiles for cs in CompoundStructure.objects.filter(compound__package=BBD)
|
||||
]
|
||||
RULES = SimpleAmbitRule.objects.filter(package=BBD)
|
||||
|
||||
@ -142,9 +134,7 @@ def migration(request):
|
||||
)
|
||||
|
||||
for r in migration_status["results"]:
|
||||
r["detail_url"] = r["detail_url"].replace(
|
||||
"http://localhost:8000", s.SERVER_URL
|
||||
)
|
||||
r["detail_url"] = r["detail_url"].replace("http://localhost:8000", s.SERVER_URL)
|
||||
|
||||
context.update(**migration_status)
|
||||
|
||||
@ -152,12 +142,10 @@ def migration(request):
|
||||
|
||||
|
||||
def migration_detail(request, package_uuid, rule_uuid):
|
||||
current_user = _anonymous_or_real(request)
|
||||
|
||||
if request.method == "GET":
|
||||
context = get_base_context(request)
|
||||
|
||||
BBD = Package.objects.get(name="EAWAG-BBD")
|
||||
BBD = Package.objects.get(uuid="32de3cf4-e3e6-4168-956e-32fa5ddb0ce1")
|
||||
STRUCTURES = CompoundStructure.objects.filter(compound__package=BBD)
|
||||
rule = Rule.objects.get(package=BBD, uuid=rule_uuid)
|
||||
|
||||
@ -235,9 +223,7 @@ def compare(request):
|
||||
context["smirks"] = (
|
||||
"[#1,#6:6][#7;X3;!$(NC1CC1)!$([N][C]=O)!$([!#8]CNC=O):1]([#1,#6:7])[#6;A;X4:2][H:3]>>[#1,#6:6][#7;X3:1]([#1,#6:7])[H:3].[#6;A:2]=O"
|
||||
)
|
||||
context["smiles"] = (
|
||||
"C(CC(=O)N[C@@H](CS[Se-])C(=O)NCC(=O)[O-])[C@@H](C(=O)[O-])N"
|
||||
)
|
||||
context["smiles"] = "C(CC(=O)N[C@@H](CS[Se-])C(=O)NCC(=O)[O-])[C@@H](C(=O)[O-])N"
|
||||
return render(request, "compare.html", context)
|
||||
|
||||
elif request.method == "POST":
|
||||
|
||||
25
package.json
Normal file
25
package.json
Normal file
@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "envipy",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"description": "enviPath UI - Tailwind CSS + DaisyUI",
|
||||
"scripts": {
|
||||
"dev": "tailwindcss -i static/css/input.css -o static/css/output.css --watch=always",
|
||||
"build": "tailwindcss -i static/css/input.css -o static/css/output.css --minify"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/cli": "^4.1.18",
|
||||
"@tailwindcss/postcss": "^4.1.18",
|
||||
"daisyui": "^5.5.14",
|
||||
"postcss": "^8.5.6",
|
||||
"prettier": "^3.7.4",
|
||||
"prettier-plugin-jinja-template": "^2.1.0",
|
||||
"prettier-plugin-tailwindcss": "^0.7.2",
|
||||
"tailwindcss": "^4.1.18"
|
||||
},
|
||||
"keywords": [
|
||||
"django",
|
||||
"tailwindcss",
|
||||
"daisyui"
|
||||
]
|
||||
}
|
||||
740
pnpm-lock.yaml
generated
Normal file
740
pnpm-lock.yaml
generated
Normal file
@ -0,0 +1,740 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
devDependencies:
|
||||
'@tailwindcss/cli':
|
||||
specifier: ^4.1.18
|
||||
version: 4.1.18
|
||||
'@tailwindcss/postcss':
|
||||
specifier: ^4.1.18
|
||||
version: 4.1.18
|
||||
daisyui:
|
||||
specifier: ^5.5.14
|
||||
version: 5.5.14
|
||||
postcss:
|
||||
specifier: ^8.5.6
|
||||
version: 8.5.6
|
||||
prettier:
|
||||
specifier: ^3.7.4
|
||||
version: 3.7.4
|
||||
prettier-plugin-jinja-template:
|
||||
specifier: ^2.1.0
|
||||
version: 2.1.0(prettier@3.7.4)
|
||||
prettier-plugin-tailwindcss:
|
||||
specifier: ^0.7.2
|
||||
version: 0.7.2(prettier@3.7.4)
|
||||
tailwindcss:
|
||||
specifier: ^4.1.18
|
||||
version: 4.1.18
|
||||
|
||||
packages:
|
||||
|
||||
'@alloc/quick-lru@5.2.0':
|
||||
resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
'@jridgewell/gen-mapping@0.3.13':
|
||||
resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==}
|
||||
|
||||
'@jridgewell/remapping@2.3.5':
|
||||
resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==}
|
||||
|
||||
'@jridgewell/resolve-uri@3.1.2':
|
||||
resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
|
||||
engines: {node: '>=6.0.0'}
|
||||
|
||||
'@jridgewell/sourcemap-codec@1.5.5':
|
||||
resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==}
|
||||
|
||||
'@jridgewell/trace-mapping@0.3.31':
|
||||
resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
|
||||
|
||||
'@parcel/watcher-android-arm64@2.5.1':
|
||||
resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@parcel/watcher-darwin-arm64@2.5.1':
|
||||
resolution: {integrity: sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@parcel/watcher-darwin-x64@2.5.1':
|
||||
resolution: {integrity: sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@parcel/watcher-freebsd-x64@2.5.1':
|
||||
resolution: {integrity: sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@parcel/watcher-linux-arm-glibc@2.5.1':
|
||||
resolution: {integrity: sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@parcel/watcher-linux-arm-musl@2.5.1':
|
||||
resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@parcel/watcher-linux-arm64-glibc@2.5.1':
|
||||
resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@parcel/watcher-linux-arm64-musl@2.5.1':
|
||||
resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@parcel/watcher-linux-x64-glibc@2.5.1':
|
||||
resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@parcel/watcher-linux-x64-musl@2.5.1':
|
||||
resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@parcel/watcher-win32-arm64@2.5.1':
|
||||
resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@parcel/watcher-win32-ia32@2.5.1':
|
||||
resolution: {integrity: sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [ia32]
|
||||
os: [win32]
|
||||
|
||||
'@parcel/watcher-win32-x64@2.5.1':
|
||||
resolution: {integrity: sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@parcel/watcher@2.5.1':
|
||||
resolution: {integrity: sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
|
||||
'@tailwindcss/cli@4.1.18':
|
||||
resolution: {integrity: sha512-sMZ+lZbDyxwjD2E0L7oRUjJ01Ffjtme5OtjvvnC+cV4CEDcbqzbp25TCpxHj6kWLU9+DlqJOiNgSOgctC2aZmg==}
|
||||
hasBin: true
|
||||
|
||||
'@tailwindcss/node@4.1.18':
|
||||
resolution: {integrity: sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ==}
|
||||
|
||||
'@tailwindcss/oxide-android-arm64@4.1.18':
|
||||
resolution: {integrity: sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@tailwindcss/oxide-darwin-arm64@4.1.18':
|
||||
resolution: {integrity: sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@tailwindcss/oxide-darwin-x64@4.1.18':
|
||||
resolution: {integrity: sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@tailwindcss/oxide-freebsd-x64@4.1.18':
|
||||
resolution: {integrity: sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@tailwindcss/oxide-linux-arm-gnueabihf@4.1.18':
|
||||
resolution: {integrity: sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@tailwindcss/oxide-linux-arm64-gnu@4.1.18':
|
||||
resolution: {integrity: sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@tailwindcss/oxide-linux-arm64-musl@4.1.18':
|
||||
resolution: {integrity: sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@tailwindcss/oxide-linux-x64-gnu@4.1.18':
|
||||
resolution: {integrity: sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@tailwindcss/oxide-linux-x64-musl@4.1.18':
|
||||
resolution: {integrity: sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@tailwindcss/oxide-wasm32-wasi@4.1.18':
|
||||
resolution: {integrity: sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA==}
|
||||
engines: {node: '>=14.0.0'}
|
||||
cpu: [wasm32]
|
||||
bundledDependencies:
|
||||
- '@napi-rs/wasm-runtime'
|
||||
- '@emnapi/core'
|
||||
- '@emnapi/runtime'
|
||||
- '@tybys/wasm-util'
|
||||
- '@emnapi/wasi-threads'
|
||||
- tslib
|
||||
|
||||
'@tailwindcss/oxide-win32-arm64-msvc@4.1.18':
|
||||
resolution: {integrity: sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@tailwindcss/oxide-win32-x64-msvc@4.1.18':
|
||||
resolution: {integrity: sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@tailwindcss/oxide@4.1.18':
|
||||
resolution: {integrity: sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A==}
|
||||
engines: {node: '>= 10'}
|
||||
|
||||
'@tailwindcss/postcss@4.1.18':
|
||||
resolution: {integrity: sha512-Ce0GFnzAOuPyfV5SxjXGn0CubwGcuDB0zcdaPuCSzAa/2vII24JTkH+I6jcbXLb1ctjZMZZI6OjDaLPJQL1S0g==}
|
||||
|
||||
braces@3.0.3:
|
||||
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
daisyui@5.5.14:
|
||||
resolution: {integrity: sha512-L47rvw7I7hK68TA97VB8Ee0woHew+/ohR6Lx6Ah/krfISOqcG4My7poNpX5Mo5/ytMxiR40fEaz6njzDi7cuSg==}
|
||||
|
||||
detect-libc@1.0.3:
|
||||
resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==}
|
||||
engines: {node: '>=0.10'}
|
||||
hasBin: true
|
||||
|
||||
detect-libc@2.1.2:
|
||||
resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
enhanced-resolve@5.18.4:
|
||||
resolution: {integrity: sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==}
|
||||
engines: {node: '>=10.13.0'}
|
||||
|
||||
fill-range@7.1.1:
|
||||
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
graceful-fs@4.2.11:
|
||||
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
|
||||
|
||||
is-extglob@2.1.1:
|
||||
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
is-glob@4.0.3:
|
||||
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
is-number@7.0.0:
|
||||
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
|
||||
engines: {node: '>=0.12.0'}
|
||||
|
||||
jiti@2.6.1:
|
||||
resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==}
|
||||
hasBin: true
|
||||
|
||||
lightningcss-android-arm64@1.30.2:
|
||||
resolution: {integrity: sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
lightningcss-darwin-arm64@1.30.2:
|
||||
resolution: {integrity: sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
lightningcss-darwin-x64@1.30.2:
|
||||
resolution: {integrity: sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
lightningcss-freebsd-x64@1.30.2:
|
||||
resolution: {integrity: sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
lightningcss-linux-arm-gnueabihf@1.30.2:
|
||||
resolution: {integrity: sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
lightningcss-linux-arm64-gnu@1.30.2:
|
||||
resolution: {integrity: sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
lightningcss-linux-arm64-musl@1.30.2:
|
||||
resolution: {integrity: sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
lightningcss-linux-x64-gnu@1.30.2:
|
||||
resolution: {integrity: sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
lightningcss-linux-x64-musl@1.30.2:
|
||||
resolution: {integrity: sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
lightningcss-win32-arm64-msvc@1.30.2:
|
||||
resolution: {integrity: sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
lightningcss-win32-x64-msvc@1.30.2:
|
||||
resolution: {integrity: sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
lightningcss@1.30.2:
|
||||
resolution: {integrity: sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
|
||||
magic-string@0.30.21:
|
||||
resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
|
||||
|
||||
micromatch@4.0.8:
|
||||
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
|
||||
engines: {node: '>=8.6'}
|
||||
|
||||
mri@1.2.0:
|
||||
resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==}
|
||||
engines: {node: '>=4'}
|
||||
|
||||
nanoid@3.3.11:
|
||||
resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==}
|
||||
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
|
||||
hasBin: true
|
||||
|
||||
node-addon-api@7.1.1:
|
||||
resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==}
|
||||
|
||||
picocolors@1.1.1:
|
||||
resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
|
||||
|
||||
picomatch@2.3.1:
|
||||
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
|
||||
engines: {node: '>=8.6'}
|
||||
|
||||
postcss@8.5.6:
|
||||
resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==}
|
||||
engines: {node: ^10 || ^12 || >=14}
|
||||
|
||||
prettier-plugin-jinja-template@2.1.0:
|
||||
resolution: {integrity: sha512-mzoCp2Oy9BDSug80fw3B3J4n4KQj1hRvoQOL1akqcDKBb5nvYxrik9zUEDs4AEJ6nK7QDTGoH0y9rx7AlnQ78Q==}
|
||||
peerDependencies:
|
||||
prettier: ^3.0.0
|
||||
|
||||
prettier-plugin-tailwindcss@0.7.2:
|
||||
resolution: {integrity: sha512-LkphyK3Fw+q2HdMOoiEHWf93fNtYJwfamoKPl7UwtjFQdei/iIBoX11G6j706FzN3ymX9mPVi97qIY8328vdnA==}
|
||||
engines: {node: '>=20.19'}
|
||||
peerDependencies:
|
||||
'@ianvs/prettier-plugin-sort-imports': '*'
|
||||
'@prettier/plugin-hermes': '*'
|
||||
'@prettier/plugin-oxc': '*'
|
||||
'@prettier/plugin-pug': '*'
|
||||
'@shopify/prettier-plugin-liquid': '*'
|
||||
'@trivago/prettier-plugin-sort-imports': '*'
|
||||
'@zackad/prettier-plugin-twig': '*'
|
||||
prettier: ^3.0
|
||||
prettier-plugin-astro: '*'
|
||||
prettier-plugin-css-order: '*'
|
||||
prettier-plugin-jsdoc: '*'
|
||||
prettier-plugin-marko: '*'
|
||||
prettier-plugin-multiline-arrays: '*'
|
||||
prettier-plugin-organize-attributes: '*'
|
||||
prettier-plugin-organize-imports: '*'
|
||||
prettier-plugin-sort-imports: '*'
|
||||
prettier-plugin-svelte: '*'
|
||||
peerDependenciesMeta:
|
||||
'@ianvs/prettier-plugin-sort-imports':
|
||||
optional: true
|
||||
'@prettier/plugin-hermes':
|
||||
optional: true
|
||||
'@prettier/plugin-oxc':
|
||||
optional: true
|
||||
'@prettier/plugin-pug':
|
||||
optional: true
|
||||
'@shopify/prettier-plugin-liquid':
|
||||
optional: true
|
||||
'@trivago/prettier-plugin-sort-imports':
|
||||
optional: true
|
||||
'@zackad/prettier-plugin-twig':
|
||||
optional: true
|
||||
prettier-plugin-astro:
|
||||
optional: true
|
||||
prettier-plugin-css-order:
|
||||
optional: true
|
||||
prettier-plugin-jsdoc:
|
||||
optional: true
|
||||
prettier-plugin-marko:
|
||||
optional: true
|
||||
prettier-plugin-multiline-arrays:
|
||||
optional: true
|
||||
prettier-plugin-organize-attributes:
|
||||
optional: true
|
||||
prettier-plugin-organize-imports:
|
||||
optional: true
|
||||
prettier-plugin-sort-imports:
|
||||
optional: true
|
||||
prettier-plugin-svelte:
|
||||
optional: true
|
||||
|
||||
prettier@3.7.4:
|
||||
resolution: {integrity: sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==}
|
||||
engines: {node: '>=14'}
|
||||
hasBin: true
|
||||
|
||||
source-map-js@1.2.1:
|
||||
resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
tailwindcss@4.1.18:
|
||||
resolution: {integrity: sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==}
|
||||
|
||||
tapable@2.3.0:
|
||||
resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==}
|
||||
engines: {node: '>=6'}
|
||||
|
||||
to-regex-range@5.0.1:
|
||||
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
|
||||
engines: {node: '>=8.0'}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@alloc/quick-lru@5.2.0': {}
|
||||
|
||||
'@jridgewell/gen-mapping@0.3.13':
|
||||
dependencies:
|
||||
'@jridgewell/sourcemap-codec': 1.5.5
|
||||
'@jridgewell/trace-mapping': 0.3.31
|
||||
|
||||
'@jridgewell/remapping@2.3.5':
|
||||
dependencies:
|
||||
'@jridgewell/gen-mapping': 0.3.13
|
||||
'@jridgewell/trace-mapping': 0.3.31
|
||||
|
||||
'@jridgewell/resolve-uri@3.1.2': {}
|
||||
|
||||
'@jridgewell/sourcemap-codec@1.5.5': {}
|
||||
|
||||
'@jridgewell/trace-mapping@0.3.31':
|
||||
dependencies:
|
||||
'@jridgewell/resolve-uri': 3.1.2
|
||||
'@jridgewell/sourcemap-codec': 1.5.5
|
||||
|
||||
'@parcel/watcher-android-arm64@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-darwin-arm64@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-darwin-x64@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-freebsd-x64@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-linux-arm-glibc@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-linux-arm-musl@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-linux-arm64-glibc@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-linux-arm64-musl@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-linux-x64-glibc@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-linux-x64-musl@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-win32-arm64@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-win32-ia32@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher-win32-x64@2.5.1':
|
||||
optional: true
|
||||
|
||||
'@parcel/watcher@2.5.1':
|
||||
dependencies:
|
||||
detect-libc: 1.0.3
|
||||
is-glob: 4.0.3
|
||||
micromatch: 4.0.8
|
||||
node-addon-api: 7.1.1
|
||||
optionalDependencies:
|
||||
'@parcel/watcher-android-arm64': 2.5.1
|
||||
'@parcel/watcher-darwin-arm64': 2.5.1
|
||||
'@parcel/watcher-darwin-x64': 2.5.1
|
||||
'@parcel/watcher-freebsd-x64': 2.5.1
|
||||
'@parcel/watcher-linux-arm-glibc': 2.5.1
|
||||
'@parcel/watcher-linux-arm-musl': 2.5.1
|
||||
'@parcel/watcher-linux-arm64-glibc': 2.5.1
|
||||
'@parcel/watcher-linux-arm64-musl': 2.5.1
|
||||
'@parcel/watcher-linux-x64-glibc': 2.5.1
|
||||
'@parcel/watcher-linux-x64-musl': 2.5.1
|
||||
'@parcel/watcher-win32-arm64': 2.5.1
|
||||
'@parcel/watcher-win32-ia32': 2.5.1
|
||||
'@parcel/watcher-win32-x64': 2.5.1
|
||||
|
||||
'@tailwindcss/cli@4.1.18':
|
||||
dependencies:
|
||||
'@parcel/watcher': 2.5.1
|
||||
'@tailwindcss/node': 4.1.18
|
||||
'@tailwindcss/oxide': 4.1.18
|
||||
enhanced-resolve: 5.18.4
|
||||
mri: 1.2.0
|
||||
picocolors: 1.1.1
|
||||
tailwindcss: 4.1.18
|
||||
|
||||
'@tailwindcss/node@4.1.18':
|
||||
dependencies:
|
||||
'@jridgewell/remapping': 2.3.5
|
||||
enhanced-resolve: 5.18.4
|
||||
jiti: 2.6.1
|
||||
lightningcss: 1.30.2
|
||||
magic-string: 0.30.21
|
||||
source-map-js: 1.2.1
|
||||
tailwindcss: 4.1.18
|
||||
|
||||
'@tailwindcss/oxide-android-arm64@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-darwin-arm64@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-darwin-x64@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-freebsd-x64@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-linux-arm-gnueabihf@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-linux-arm64-gnu@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-linux-arm64-musl@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-linux-x64-gnu@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-linux-x64-musl@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-wasm32-wasi@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-win32-arm64-msvc@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide-win32-x64-msvc@4.1.18':
|
||||
optional: true
|
||||
|
||||
'@tailwindcss/oxide@4.1.18':
|
||||
optionalDependencies:
|
||||
'@tailwindcss/oxide-android-arm64': 4.1.18
|
||||
'@tailwindcss/oxide-darwin-arm64': 4.1.18
|
||||
'@tailwindcss/oxide-darwin-x64': 4.1.18
|
||||
'@tailwindcss/oxide-freebsd-x64': 4.1.18
|
||||
'@tailwindcss/oxide-linux-arm-gnueabihf': 4.1.18
|
||||
'@tailwindcss/oxide-linux-arm64-gnu': 4.1.18
|
||||
'@tailwindcss/oxide-linux-arm64-musl': 4.1.18
|
||||
'@tailwindcss/oxide-linux-x64-gnu': 4.1.18
|
||||
'@tailwindcss/oxide-linux-x64-musl': 4.1.18
|
||||
'@tailwindcss/oxide-wasm32-wasi': 4.1.18
|
||||
'@tailwindcss/oxide-win32-arm64-msvc': 4.1.18
|
||||
'@tailwindcss/oxide-win32-x64-msvc': 4.1.18
|
||||
|
||||
'@tailwindcss/postcss@4.1.18':
|
||||
dependencies:
|
||||
'@alloc/quick-lru': 5.2.0
|
||||
'@tailwindcss/node': 4.1.18
|
||||
'@tailwindcss/oxide': 4.1.18
|
||||
postcss: 8.5.6
|
||||
tailwindcss: 4.1.18
|
||||
|
||||
braces@3.0.3:
|
||||
dependencies:
|
||||
fill-range: 7.1.1
|
||||
|
||||
daisyui@5.5.14: {}
|
||||
|
||||
detect-libc@1.0.3: {}
|
||||
|
||||
detect-libc@2.1.2: {}
|
||||
|
||||
enhanced-resolve@5.18.4:
|
||||
dependencies:
|
||||
graceful-fs: 4.2.11
|
||||
tapable: 2.3.0
|
||||
|
||||
fill-range@7.1.1:
|
||||
dependencies:
|
||||
to-regex-range: 5.0.1
|
||||
|
||||
graceful-fs@4.2.11: {}
|
||||
|
||||
is-extglob@2.1.1: {}
|
||||
|
||||
is-glob@4.0.3:
|
||||
dependencies:
|
||||
is-extglob: 2.1.1
|
||||
|
||||
is-number@7.0.0: {}
|
||||
|
||||
jiti@2.6.1: {}
|
||||
|
||||
lightningcss-android-arm64@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-darwin-arm64@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-darwin-x64@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-freebsd-x64@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-linux-arm-gnueabihf@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-linux-arm64-gnu@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-linux-arm64-musl@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-linux-x64-gnu@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-linux-x64-musl@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-win32-arm64-msvc@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss-win32-x64-msvc@1.30.2:
|
||||
optional: true
|
||||
|
||||
lightningcss@1.30.2:
|
||||
dependencies:
|
||||
detect-libc: 2.1.2
|
||||
optionalDependencies:
|
||||
lightningcss-android-arm64: 1.30.2
|
||||
lightningcss-darwin-arm64: 1.30.2
|
||||
lightningcss-darwin-x64: 1.30.2
|
||||
lightningcss-freebsd-x64: 1.30.2
|
||||
lightningcss-linux-arm-gnueabihf: 1.30.2
|
||||
lightningcss-linux-arm64-gnu: 1.30.2
|
||||
lightningcss-linux-arm64-musl: 1.30.2
|
||||
lightningcss-linux-x64-gnu: 1.30.2
|
||||
lightningcss-linux-x64-musl: 1.30.2
|
||||
lightningcss-win32-arm64-msvc: 1.30.2
|
||||
lightningcss-win32-x64-msvc: 1.30.2
|
||||
|
||||
magic-string@0.30.21:
|
||||
dependencies:
|
||||
'@jridgewell/sourcemap-codec': 1.5.5
|
||||
|
||||
micromatch@4.0.8:
|
||||
dependencies:
|
||||
braces: 3.0.3
|
||||
picomatch: 2.3.1
|
||||
|
||||
mri@1.2.0: {}
|
||||
|
||||
nanoid@3.3.11: {}
|
||||
|
||||
node-addon-api@7.1.1: {}
|
||||
|
||||
picocolors@1.1.1: {}
|
||||
|
||||
picomatch@2.3.1: {}
|
||||
|
||||
postcss@8.5.6:
|
||||
dependencies:
|
||||
nanoid: 3.3.11
|
||||
picocolors: 1.1.1
|
||||
source-map-js: 1.2.1
|
||||
|
||||
prettier-plugin-jinja-template@2.1.0(prettier@3.7.4):
|
||||
dependencies:
|
||||
prettier: 3.7.4
|
||||
|
||||
prettier-plugin-tailwindcss@0.7.2(prettier@3.7.4):
|
||||
dependencies:
|
||||
prettier: 3.7.4
|
||||
|
||||
prettier@3.7.4: {}
|
||||
|
||||
source-map-js@1.2.1: {}
|
||||
|
||||
tailwindcss@4.1.18: {}
|
||||
|
||||
tapable@2.3.0: {}
|
||||
|
||||
to-regex-range@5.0.1:
|
||||
dependencies:
|
||||
is-number: 7.0.0
|
||||
3
pnpm-workspace.yaml
Normal file
3
pnpm-workspace.yaml
Normal file
@ -0,0 +1,3 @@
|
||||
onlyBuiltDependencies:
|
||||
- '@parcel/watcher'
|
||||
- '@tailwindcss/oxide'
|
||||
@ -9,7 +9,8 @@ dependencies = [
|
||||
"django>=5.2.1",
|
||||
"django-extensions>=4.1",
|
||||
"django-model-utils>=5.0.0",
|
||||
"django-ninja>=1.4.1",
|
||||
"django-ninja>=1.4.5",
|
||||
"django-ninja-extra>=0.30.6",
|
||||
"django-oauth-toolkit>=3.0.1",
|
||||
"django-polymorphic>=4.1.0",
|
||||
"enviformer",
|
||||
@ -18,6 +19,7 @@ dependencies = [
|
||||
"envipy-plugins",
|
||||
"epam-indigo>=1.30.1",
|
||||
"gunicorn>=23.0.0",
|
||||
"jsonref>=1.1.0",
|
||||
"networkx>=3.4.2",
|
||||
"psycopg2-binary>=2.9.10",
|
||||
"python-dotenv>=1.1.0",
|
||||
@ -27,13 +29,14 @@ dependencies = [
|
||||
"scikit-learn>=1.6.1",
|
||||
"sentry-sdk[django]>=2.32.0",
|
||||
"setuptools>=80.8.0",
|
||||
"nh3==0.3.2",
|
||||
"polars==1.35.1",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
enviformer = { git = "ssh://git@git.envipath.com/enviPath/enviformer.git", rev = "v0.1.4" }
|
||||
envipy-plugins = { git = "ssh://git@git.envipath.com/enviPath/enviPy-plugins.git", rev = "v0.1.0" }
|
||||
envipy-additional-information = { git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git", rev = "v0.1.7"}
|
||||
envipy-additional-information = { git = "ssh://git@git.envipath.com/enviPath/enviPy-additional-information.git", rev = "v0.4.2" }
|
||||
envipy-ambit = { git = "ssh://git@git.envipath.com/enviPath/enviPy-ambit.git" }
|
||||
|
||||
[project.optional-dependencies]
|
||||
@ -44,6 +47,9 @@ dev = [
|
||||
"poethepoet>=0.37.0",
|
||||
"pre-commit>=4.3.0",
|
||||
"ruff>=0.13.3",
|
||||
"pytest-playwright>=0.7.1",
|
||||
"pytest-django>=4.11.1",
|
||||
"pytest-cov>=7.0.0",
|
||||
]
|
||||
|
||||
[tool.ruff]
|
||||
@ -65,23 +71,46 @@ docstring-code-format = true
|
||||
|
||||
[tool.poe.tasks]
|
||||
# Main tasks
|
||||
setup = { sequence = ["db-up", "migrate", "bootstrap"], help = "Complete setup: start database, run migrations, and bootstrap data" }
|
||||
dev = { cmd = "python manage.py runserver", help = "Start the development server", deps = ["db-up"] }
|
||||
setup = { sequence = [
|
||||
"db-up",
|
||||
"migrate",
|
||||
"bootstrap",
|
||||
], help = "Complete setup: start database, run migrations, and bootstrap data" }
|
||||
dev = { cmd = "uv run python scripts/dev_server.py", help = "Start the development server with CSS watcher", deps = [
|
||||
"db-up",
|
||||
"js-deps",
|
||||
] }
|
||||
build = { sequence = [
|
||||
"build-frontend",
|
||||
"collectstatic",
|
||||
], help = "Build frontend assets and collect static files" }
|
||||
|
||||
# Database tasks
|
||||
db-up = { cmd = "docker compose -f docker-compose.dev.yml up -d", help = "Start PostgreSQL database using Docker Compose" }
|
||||
db-down = { cmd = "docker compose -f docker-compose.dev.yml down", help = "Stop PostgreSQL database" }
|
||||
db-up = { cmd = "docker compose -p envipath -f docker-compose.dev.yml up -d", help = "Start PostgreSQL database using Docker Compose" }
|
||||
db-down = { cmd = "docker compose -p envipath -f docker-compose.dev.yml down", help = "Stop PostgreSQL database" }
|
||||
|
||||
# Celery tasks
|
||||
celery = { cmd = "celery -A envipath worker -l INFO -Q predict,model,background", help = "Start Celery worker for async task processing" }
|
||||
celery-dev = { sequence = [
|
||||
"db-up",
|
||||
"celery",
|
||||
], help = "Start database and Celery worker" }
|
||||
|
||||
# Frontend tasks
|
||||
js-deps = { cmd = "uv run python scripts/pnpm_wrapper.py install", help = "Install frontend dependencies" }
|
||||
|
||||
# Full cleanup tasks
|
||||
clean = { sequence = ["clean-db"], help = "Remove model files and database volumes (WARNING: destroys all data!)" }
|
||||
clean = { sequence = [
|
||||
"clean-db",
|
||||
], help = "Remove model files and database volumes (WARNING: destroys all data!)" }
|
||||
clean-db = { cmd = "docker compose -f docker-compose.dev.yml down -v", help = "Removes the database container and volume." }
|
||||
|
||||
# Django tasks
|
||||
migrate = { cmd = "python manage.py migrate", help = "Run database migrations" }
|
||||
migrate = { cmd = "uv run python manage.py migrate", help = "Run database migrations" }
|
||||
bootstrap = { shell = """
|
||||
echo "Bootstrapping initial data..."
|
||||
echo "This will take a bit ⏱️. Get yourself some coffee..."
|
||||
python manage.py bootstrap
|
||||
uv run python manage.py bootstrap
|
||||
echo "✓ Bootstrap complete"
|
||||
echo ""
|
||||
echo "Default admin credentials:"
|
||||
@ -89,4 +118,35 @@ echo " Username: admin"
|
||||
echo " Email: admin@envipath.com"
|
||||
echo " Password: SuperSafe"
|
||||
""", help = "Bootstrap initial data (anonymous user, packages, models)" }
|
||||
shell = { cmd = "python manage.py shell", help = "Open Django shell" }
|
||||
shell = { cmd = "uv run python manage.py shell", help = "Open Django shell" }
|
||||
|
||||
|
||||
build-frontend = { cmd = "uv run python scripts/pnpm_wrapper.py run build", help = "Build frontend assets using pnpm", deps = [
|
||||
"js-deps",
|
||||
] } # Build tasks
|
||||
|
||||
|
||||
collectstatic = { cmd = "uv run python manage.py collectstatic --noinput", help = "Collect static files for production", deps = [
|
||||
"build-frontend",
|
||||
] }
|
||||
|
||||
frontend-test-setup = { cmd = "playwright install", help = "Install the browsers required for frontend testing" }
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--verbose --capture=no --durations=10"
|
||||
testpaths = ["tests", "*/tests"]
|
||||
pythonpath = ["."]
|
||||
norecursedirs = [
|
||||
"env",
|
||||
"venv",
|
||||
"envipy-plugins",
|
||||
"envipy-additional-information",
|
||||
"envipy-ambit",
|
||||
"enviformer",
|
||||
]
|
||||
markers = [
|
||||
"api: API tests",
|
||||
"frontend: Frontend tests",
|
||||
"end2end: End-to-end tests",
|
||||
"slow: Slow tests",
|
||||
]
|
||||
|
||||
206
scripts/dev_server.py
Executable file
206
scripts/dev_server.py
Executable file
@ -0,0 +1,206 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cross-platform development server script.
|
||||
Starts pnpm CSS watcher and Django dev server, handling cleanup on exit.
|
||||
Works on both Windows and Unix systems.
|
||||
"""
|
||||
|
||||
import atexit
|
||||
import shutil
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import os
|
||||
import dotenv
|
||||
|
||||
|
||||
def find_pnpm():
|
||||
"""
|
||||
Find pnpm executable on the system.
|
||||
Returns the path to pnpm or None if not found.
|
||||
"""
|
||||
# Try to find pnpm using shutil.which
|
||||
# On Windows, this will find pnpm.cmd if it's in PATH
|
||||
pnpm_path = shutil.which("pnpm")
|
||||
|
||||
if pnpm_path:
|
||||
return pnpm_path
|
||||
|
||||
# On Windows, also try pnpm.cmd explicitly
|
||||
if sys.platform == "win32":
|
||||
pnpm_cmd = shutil.which("pnpm.cmd")
|
||||
if pnpm_cmd:
|
||||
return pnpm_cmd
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class DevServerManager:
|
||||
"""Manages background processes for development server."""
|
||||
|
||||
def __init__(self):
|
||||
self.processes = []
|
||||
self._cleanup_registered = False
|
||||
|
||||
def start_process(self, command, description, shell=False):
|
||||
"""Start a background process and return the process object."""
|
||||
print(f"Starting {description}...")
|
||||
try:
|
||||
if shell:
|
||||
# Use shell=True for commands that need shell interpretation
|
||||
process = subprocess.Popen(
|
||||
command,
|
||||
shell=True,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True,
|
||||
bufsize=1,
|
||||
)
|
||||
else:
|
||||
# Split command into list for subprocess
|
||||
process = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True,
|
||||
bufsize=1,
|
||||
)
|
||||
self.processes.append((process, description))
|
||||
print(" ".join(command))
|
||||
print(f"✓ Started {description} (PID: {process.pid})")
|
||||
return process
|
||||
except Exception as e:
|
||||
print(f"✗ Failed to start {description}: {e}", file=sys.stderr)
|
||||
self.cleanup()
|
||||
sys.exit(1)
|
||||
|
||||
def cleanup(self):
|
||||
"""Terminate all running processes."""
|
||||
if not self.processes:
|
||||
return
|
||||
|
||||
print("\nShutting down...")
|
||||
for process, description in self.processes:
|
||||
if process.poll() is None: # Process is still running
|
||||
try:
|
||||
# Try graceful termination first
|
||||
if sys.platform == "win32":
|
||||
process.terminate()
|
||||
else:
|
||||
process.send_signal(signal.SIGTERM)
|
||||
|
||||
# Wait up to 5 seconds for graceful shutdown
|
||||
try:
|
||||
process.wait(timeout=5)
|
||||
except subprocess.TimeoutExpired:
|
||||
# Force kill if graceful shutdown failed
|
||||
if sys.platform == "win32":
|
||||
process.kill()
|
||||
else:
|
||||
process.send_signal(signal.SIGKILL)
|
||||
process.wait()
|
||||
|
||||
print(f"✓ {description} stopped")
|
||||
except Exception as e:
|
||||
print(f"✗ Error stopping {description}: {e}", file=sys.stderr)
|
||||
|
||||
self.processes.clear()
|
||||
|
||||
def register_cleanup(self):
|
||||
"""Register cleanup handlers for various exit scenarios."""
|
||||
if self._cleanup_registered:
|
||||
return
|
||||
|
||||
self._cleanup_registered = True
|
||||
|
||||
# Register atexit handler (works on all platforms)
|
||||
atexit.register(self.cleanup)
|
||||
|
||||
# Register signal handlers (Unix only)
|
||||
if sys.platform != "win32":
|
||||
signal.signal(signal.SIGINT, self._signal_handler)
|
||||
signal.signal(signal.SIGTERM, self._signal_handler)
|
||||
|
||||
def _signal_handler(self, signum, frame):
|
||||
"""Handle Unix signals."""
|
||||
self.cleanup()
|
||||
sys.exit(0)
|
||||
|
||||
def wait_for_process(self, process, description):
|
||||
"""Wait for a process to finish and handle its output."""
|
||||
try:
|
||||
# Stream output from the process
|
||||
for line in iter(process.stdout.readline, ""):
|
||||
if line:
|
||||
print(f"[{description}] {line.rstrip()}")
|
||||
|
||||
process.wait()
|
||||
return process.returncode
|
||||
except KeyboardInterrupt:
|
||||
# Handle Ctrl+C
|
||||
self.cleanup()
|
||||
sys.exit(0)
|
||||
except Exception as e:
|
||||
print(f"Error waiting for {description}: {e}", file=sys.stderr)
|
||||
self.cleanup()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point."""
|
||||
dotenv.load_dotenv()
|
||||
manager = DevServerManager()
|
||||
manager.register_cleanup()
|
||||
|
||||
# Find pnpm executable
|
||||
pnpm_path = find_pnpm()
|
||||
if not pnpm_path:
|
||||
print("Error: pnpm not found in PATH.", file=sys.stderr)
|
||||
print("\nPlease install pnpm:", file=sys.stderr)
|
||||
print(" Windows: https://pnpm.io/installation#on-windows", file=sys.stderr)
|
||||
print(" Unix: https://pnpm.io/installation#on-posix-systems", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Determine shell usage based on platform
|
||||
use_shell = sys.platform == "win32"
|
||||
|
||||
# Start pnpm CSS watcher
|
||||
# Use the found pnpm path to ensure it works on Windows
|
||||
pnpm_command = f'"{pnpm_path}" run dev' if use_shell else [pnpm_path, "run", "dev"]
|
||||
manager.start_process(
|
||||
pnpm_command,
|
||||
"CSS watcher",
|
||||
shell=use_shell,
|
||||
)
|
||||
|
||||
# Give pnpm a moment to start
|
||||
time.sleep(1)
|
||||
|
||||
# Start Django dev server
|
||||
port = os.environ.get("DJANGO_PORT", "8000")
|
||||
django_process = manager.start_process(
|
||||
["uv", "run", "python", "manage.py", "runserver", f"0:{port}"],
|
||||
f"Django server on port {port}",
|
||||
shell=False,
|
||||
)
|
||||
|
||||
print("\nDevelopment servers are running. Press Ctrl+C to stop.\n")
|
||||
|
||||
try:
|
||||
# Wait for Django server (main process)
|
||||
# If Django exits, we should clean up everything
|
||||
return_code = manager.wait_for_process(django_process, "Django")
|
||||
|
||||
# If Django exited unexpectedly, clean up and exit
|
||||
if return_code != 0:
|
||||
manager.cleanup()
|
||||
sys.exit(return_code)
|
||||
except KeyboardInterrupt:
|
||||
# Ctrl+C was pressed
|
||||
manager.cleanup()
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
59
scripts/pnpm_wrapper.py
Executable file
59
scripts/pnpm_wrapper.py
Executable file
@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cross-platform pnpm command wrapper.
|
||||
Finds pnpm correctly on Windows (handles pnpm.cmd) and Unix systems.
|
||||
"""
|
||||
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def find_pnpm():
|
||||
"""
|
||||
Find pnpm executable on the system.
|
||||
Returns the path to pnpm or None if not found.
|
||||
"""
|
||||
# Try to find pnpm using shutil.which
|
||||
# On Windows, this will find pnpm.cmd if it's in PATH
|
||||
pnpm_path = shutil.which("pnpm")
|
||||
|
||||
if pnpm_path:
|
||||
return pnpm_path
|
||||
|
||||
# On Windows, also try pnpm.cmd explicitly
|
||||
if sys.platform == "win32":
|
||||
pnpm_cmd = shutil.which("pnpm.cmd")
|
||||
if pnpm_cmd:
|
||||
return pnpm_cmd
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point - execute pnpm with provided arguments."""
|
||||
pnpm_path = find_pnpm()
|
||||
|
||||
if not pnpm_path:
|
||||
print("Error: pnpm not found in PATH.", file=sys.stderr)
|
||||
print("\nPlease install pnpm:", file=sys.stderr)
|
||||
print(" Windows: https://pnpm.io/installation#on-windows", file=sys.stderr)
|
||||
print(" Unix: https://pnpm.io/installation#on-posix-systems", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Get all arguments passed to this script
|
||||
args = sys.argv[1:]
|
||||
|
||||
# Execute pnpm with the provided arguments
|
||||
try:
|
||||
sys.exit(subprocess.call([pnpm_path] + args))
|
||||
except KeyboardInterrupt:
|
||||
# Handle Ctrl+C gracefully
|
||||
sys.exit(130)
|
||||
except Exception as e:
|
||||
print(f"Error executing pnpm: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
84
static/css/daisyui-theme.css
Normal file
84
static/css/daisyui-theme.css
Normal file
@ -0,0 +1,84 @@
|
||||
/**
|
||||
* DaisyUI Themes - Generated by Style Dictionary
|
||||
* Theme mappings defined in tokens/daisyui-themes.json
|
||||
*/
|
||||
|
||||
/* Light theme (default) */
|
||||
@plugin "daisyui/theme" {
|
||||
name: "envipath";
|
||||
default: true;
|
||||
color-scheme: light;
|
||||
|
||||
--color-base-100: var(--color-neutral-50);
|
||||
--color-base-200: var(--color-neutral-100);
|
||||
--color-base-300: var(--color-neutral-200);
|
||||
--color-base-content: var(--color-neutral-900);
|
||||
--color-primary: var(--color-primary-500);
|
||||
--color-primary-content: var(--color-primary-50);
|
||||
--color-secondary: var(--color-secondary-500);
|
||||
--color-secondary-content: var(--color-secondary-50);
|
||||
--color-accent: var(--color-accent-500);
|
||||
--color-accent-content: var(--color-accent-50);
|
||||
--color-neutral: var(--color-neutral-950);
|
||||
--color-neutral-content: var(--color-neutral-100);
|
||||
--color-info: var(--color-info-500);
|
||||
--color-info-content: var(--color-info-950);
|
||||
--color-success: var(--color-success-500);
|
||||
--color-success-content: var(--color-success-950);
|
||||
--color-warning: var(--color-warning-500);
|
||||
--color-warning-content: var(--color-warning-950);
|
||||
--color-error: var(--color-error-500);
|
||||
--color-error-content: var(--color-error-950);
|
||||
|
||||
/* border radius */
|
||||
--radius-selector: 1rem;
|
||||
--radius-field: 0.25rem;
|
||||
--radius-box: 0.5rem;
|
||||
|
||||
/* base sizes */
|
||||
--size-selector: 0.25rem;
|
||||
--size-field: 0.25rem;
|
||||
|
||||
/* border size */
|
||||
--border: 1px;
|
||||
|
||||
/* effects */
|
||||
--depth: 1;
|
||||
--noise: 0;
|
||||
}
|
||||
|
||||
/* Dark theme (prefers-color-scheme: dark) */
|
||||
@plugin "daisyui/theme" {
|
||||
name: "envipath-dark";
|
||||
prefersdark: true;
|
||||
color-scheme: dark;
|
||||
|
||||
--color-primary: var(--color-primary-400);
|
||||
--color-primary-content: var(--color-neutral-950);
|
||||
--color-secondary: var(--color-secondary-400);
|
||||
--color-secondary-content: var(--color-neutral-950);
|
||||
--color-accent: var(--color-primary-500);
|
||||
--color-accent-content: var(--color-neutral-950);
|
||||
--color-neutral: var(--color-neutral-300);
|
||||
--color-neutral-content: var(--color-neutral-900);
|
||||
--color-base-100: var(--color-neutral-900);
|
||||
--color-base-200: var(--color-neutral-800);
|
||||
--color-base-300: var(--color-neutral-700);
|
||||
--color-base-content: var(--color-neutral-50);
|
||||
--color-info: var(--color-primary-400);
|
||||
--color-info-content: var(--color-neutral-950);
|
||||
--color-success: var(--color-success-400);
|
||||
--color-success-content: var(--color-neutral-950);
|
||||
--color-warning: var(--color-warning-400);
|
||||
--color-warning-content: var(--color-neutral-950);
|
||||
--color-error: var(--color-error-400);
|
||||
--color-error-content: var(--color-neutral-950);
|
||||
--radius-selector: 1rem;
|
||||
--radius-field: 0.25rem;
|
||||
--radius-box: 0.5rem;
|
||||
--size-selector: 0.25rem;
|
||||
--size-field: 0.25rem;
|
||||
--border: 1px;
|
||||
--depth: 1;
|
||||
--noise: 0;
|
||||
}
|
||||
36
static/css/input.css
Normal file
36
static/css/input.css
Normal file
@ -0,0 +1,36 @@
|
||||
@import "tailwindcss";
|
||||
|
||||
/* fira-code-latin-wght-normal */
|
||||
@font-face {
|
||||
font-family: 'Fira Code Variable';
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 300 700;
|
||||
src: url(https://cdn.jsdelivr.net/fontsource/fonts/fira-code:vf@latest/latin-wght-normal.woff2) format('woff2-variations');
|
||||
unicode-range: U+0000-00FF,U+0131,U+0152-0153,U+02BB-02BC,U+02C6,U+02DA,U+02DC,U+0304,U+0308,U+0329,U+2000-206F,U+20AC,U+2122,U+2191,U+2193,U+2212,U+2215,U+FEFF,U+FFFD;
|
||||
}
|
||||
|
||||
/* inter-latin-wght-normal */
|
||||
@font-face {
|
||||
font-family: 'Inter Variable';
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 100 900;
|
||||
src: url(https://cdn.jsdelivr.net/fontsource/fonts/inter:vf@latest/latin-wght-normal.woff2) format('woff2-variations');
|
||||
unicode-range: U+0000-00FF,U+0131,U+0152-0153,U+02BB-02BC,U+02C6,U+02DA,U+02DC,U+0304,U+0308,U+0329,U+2000-206F,U+20AC,U+2122,U+2191,U+2193,U+2212,U+2215,U+FEFF,U+FFFD;
|
||||
}
|
||||
|
||||
|
||||
/* Tell Tailwind where to find Django templates and Python files */
|
||||
@source "../../templates";
|
||||
|
||||
/* Custom theme configuration - must come before plugins */
|
||||
@import "./theme.css";
|
||||
|
||||
/* Import DaisyUI plugin */
|
||||
@plugin "daisyui" {
|
||||
logs: true;
|
||||
exclude: rootscrollgutter;
|
||||
}
|
||||
|
||||
@import "./daisyui-theme.css";
|
||||
111
static/css/theme.css
Normal file
111
static/css/theme.css
Normal file
@ -0,0 +1,111 @@
|
||||
/**
|
||||
* Tailwind v4 Theme - Generated by Style Dictionary
|
||||
* This creates Tailwind utility classes from design tokens
|
||||
*/
|
||||
|
||||
@theme {
|
||||
/* Colors */
|
||||
--color-primary-50: oklch(0.98 0.02 201);
|
||||
--color-primary-100: oklch(0.96 0.04 203);
|
||||
--color-primary-200: oklch(0.92 0.08 205);
|
||||
--color-primary-300: oklch(0.87 0.12 207);
|
||||
--color-primary-400: oklch(0.80 0.13 212);
|
||||
--color-primary-500: oklch(0.71 0.13 215);
|
||||
--color-primary-600: oklch(0.61 0.11 222);
|
||||
--color-primary-700: oklch(0.52 0.09 223);
|
||||
--color-primary-800: oklch(0.45 0.08 224);
|
||||
--color-primary-900: oklch(0.40 0.07 227);
|
||||
--color-primary-950: oklch(0.30 0.05 230);
|
||||
--color-secondary-50: oklch(0.98 0.02 166);
|
||||
--color-secondary-100: oklch(0.95 0.05 163);
|
||||
--color-secondary-200: oklch(0.90 0.09 164);
|
||||
--color-secondary-300: oklch(0.85 0.13 165);
|
||||
--color-secondary-400: oklch(0.77 0.15 163);
|
||||
--color-secondary-500: oklch(0.70 0.15 162);
|
||||
--color-secondary-600: oklch(0.60 0.13 163);
|
||||
--color-secondary-700: oklch(0.51 0.10 166);
|
||||
--color-secondary-800: oklch(0.43 0.09 167);
|
||||
--color-secondary-900: oklch(0.38 0.07 169);
|
||||
--color-secondary-950: oklch(0.26 0.05 173);
|
||||
--color-success-50: oklch(0.98 0.02 156);
|
||||
--color-success-100: oklch(0.96 0.04 157);
|
||||
--color-success-200: oklch(0.93 0.08 156);
|
||||
--color-success-300: oklch(0.87 0.14 154);
|
||||
--color-success-400: oklch(0.80 0.18 152);
|
||||
--color-success-500: oklch(0.72 0.19 150);
|
||||
--color-success-600: oklch(0.63 0.17 149);
|
||||
--color-success-700: oklch(0.53 0.14 150);
|
||||
--color-success-800: oklch(0.45 0.11 151);
|
||||
--color-success-900: oklch(0.39 0.09 153);
|
||||
--color-success-950: oklch(0.27 0.06 153);
|
||||
--color-warning-50: oklch(0.99 0.03 102);
|
||||
--color-warning-100: oklch(0.97 0.07 103);
|
||||
--color-warning-200: oklch(0.95 0.12 102);
|
||||
--color-warning-300: oklch(0.91 0.17 98);
|
||||
--color-warning-400: oklch(0.86 0.17 92);
|
||||
--color-warning-500: oklch(0.80 0.16 86);
|
||||
--color-warning-600: oklch(0.68 0.14 76);
|
||||
--color-warning-700: oklch(0.55 0.12 66);
|
||||
--color-warning-800: oklch(0.48 0.10 62);
|
||||
--color-warning-900: oklch(0.42 0.09 58);
|
||||
--color-warning-950: oklch(0.29 0.06 54);
|
||||
--color-error-50: oklch(0.97 0.01 17);
|
||||
--color-error-100: oklch(0.94 0.03 18);
|
||||
--color-error-200: oklch(0.88 0.06 18);
|
||||
--color-error-300: oklch(0.81 0.10 20);
|
||||
--color-error-400: oklch(0.71 0.17 22);
|
||||
--color-error-500: oklch(0.64 0.21 25);
|
||||
--color-error-600: oklch(0.58 0.22 27);
|
||||
--color-error-700: oklch(0.51 0.19 28);
|
||||
--color-error-800: oklch(0.44 0.16 27);
|
||||
--color-error-900: oklch(0.40 0.13 26);
|
||||
--color-error-950: oklch(0.26 0.09 26);
|
||||
--color-neutral-50: oklch(0.98 0.00 248);
|
||||
--color-neutral-100: oklch(0.97 0.01 248);
|
||||
--color-neutral-200: oklch(0.93 0.01 256);
|
||||
--color-neutral-300: oklch(0.87 0.02 253);
|
||||
--color-neutral-400: oklch(0.71 0.04 257);
|
||||
--color-neutral-500: oklch(0.55 0.04 257);
|
||||
--color-neutral-600: oklch(0.45 0.04 257);
|
||||
--color-neutral-700: oklch(0.37 0.04 257);
|
||||
--color-neutral-800: oklch(0.28 0.04 260);
|
||||
--color-neutral-900: oklch(0.28 0.04 260);
|
||||
--color-neutral-950: oklch(0.28 0.04 260);
|
||||
|
||||
/* Spacing */
|
||||
--spacing-0: 0;
|
||||
--spacing-1: 0.25rem;
|
||||
--spacing-2: 0.5rem;
|
||||
--spacing-3: 0.75rem;
|
||||
--spacing-4: 1rem;
|
||||
--spacing-5: 1.25rem;
|
||||
--spacing-6: 1.5rem;
|
||||
--spacing-7: 1.75rem;
|
||||
--spacing-8: 2rem;
|
||||
--spacing-10: 2.5rem;
|
||||
--spacing-12: 3rem;
|
||||
--spacing-16: 4rem;
|
||||
--spacing-20: 5rem;
|
||||
--spacing-24: 6rem;
|
||||
--spacing-32: 8rem;
|
||||
--spacing-40: 10rem;
|
||||
--spacing-48: 12rem;
|
||||
--spacing-56: 14rem;
|
||||
--spacing-64: 16rem;
|
||||
|
||||
/* Typography */
|
||||
--font-family-sans: 'Inter Variable', system-ui, -apple-system, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
|
||||
--font-family-mono: 'Fira Code Variable', 'Cascadia Code', 'Source Code Pro', Menlo, Consolas, monospace;
|
||||
--font-family-base: 'Inter Variable', system-ui, -apple-system, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
|
||||
--font-size-xs: 0.75rem;
|
||||
--font-size-sm: 0.875rem;
|
||||
--font-size-base: 1rem;
|
||||
--font-size-lg: 1.125rem;
|
||||
--font-size-xl: 1.25rem;
|
||||
--font-size-2xl: 1.5rem;
|
||||
--font-size-3xl: 1.875rem;
|
||||
--font-size-4xl: 2.25rem;
|
||||
--font-size-5xl: 3rem;
|
||||
--font-size-6xl: 3.75rem;
|
||||
--font-size-7xl: 4.5rem;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user