initial dump of deployment files
commit 16e3c3fb31
8 changed files with 1593 additions and 0 deletions
.gitignore (vendored), new file, 2 lines
@@ -0,0 +1,2 @@
/data
/.env
Makefile, new file, 24 lines
@@ -0,0 +1,24 @@
CONFIG_FILE = conf/config.env
SECRET_FILE = conf/secrets.env

COMPOSE_CMD ?= docker compose
# use unix:///run/docker.sock for docker socket, unix://${XDG_RUNTIME_DIR}/podman/podman.sock for podman
DOCKER_HOST ?= unix:///run/docker.sock
COMPOSE = DOCKER_HOST=$(DOCKER_HOST) $(COMPOSE_CMD)

psql:
	$(COMPOSE) exec inventree-db sh -c 'psql $$POSTGRES_USER $$POSTGRES_DB'

update:
	read -p "Update will cause downtime of the server. Are you sure you want to continue? Press Ctrl+c to abort!" _
	$(COMPOSE) down
	$(COMPOSE) pull
	$(COMPOSE) run inventree-server invoke update
	$(COMPOSE) up -d

data: # podman does not autocreate data folder
	mkdir data

# pass all commands to compose cli
%: data
	$(COMPOSE) $@
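For reference, a few typical invocations of the targets above. These are sketches built from the Makefile itself (the podman socket path comes from its comment); everything else uses the defaults:

    make update     # stop the stack, pull images, run 'invoke update', start detached
    make up         # run the stack in the foreground
    make "up -d"    # run the stack detached (see the README below)
    make psql       # open a psql shell inside the inventree-db container
    make up DOCKER_HOST=unix://${XDG_RUNTIME_DIR}/podman/podman.sock   # rootless podman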
README.md, new file, 22 lines
@@ -0,0 +1,22 @@
# InvenTree Deployment

These are the deployment files required to get InvenTree up and running. InvenTree is deployed as a `docker compose` setup and therefore has the following dependencies:

- Podman/Docker
- Docker Compose
- Make (as script runner)

## Configuration

Copy `sample.env` to a file named `.env` and make sure to adapt all values to your needs, especially the secrets.
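For reference, a minimal sketch of what such a `.env` could look like. The variable names are the ones required by `docker-compose.yml` (plus the `INVENTREE_DB_*` names read by the patched `settings.py`); all values are placeholders, and the shipped `sample.env` may contain further options:

    # container images (placeholders; pin whatever versions you actually run)
    POSTGRES_IMAGE=docker.io/library/postgres:15-alpine
    REDIS_IMAGE=docker.io/library/redis:7-alpine
    INVENTREE_IMAGE=inventree/inventree:stable
    NGINX_IMAGE=docker.io/library/nginx:stable
    # external port and data directory (absolute path to the bind-mounted data folder)
    INVENTREE_WEB_PORT=1337
    INVENTREE_EXT_VOLUME=/absolute/path/to/data
    # database (read by docker-compose.yml and by settings.py inside the containers)
    INVENTREE_DB_ENGINE=postgresql
    INVENTREE_DB_NAME=inventree
    INVENTREE_DB_USER=inventree
    INVENTREE_DB_PASSWORD=change-me
    INVENTREE_DB_HOST=inventree-db
    INVENTREE_DB_PORT=5432
    # Keycloak SSO client
    HKNG_KEYCLOAK_URL=https://keycloak.example.org
    HKNG_KEYCLOAK_REALM=example-realm
    HKNG_KEYCLOAK_CLIENT_ID=inventree
    HKNG_KEYCLOAK_CLIENT_SECRET=change-me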

## Installation

To run an update, or to complete the first setup, use `make update`, which pulls the latest images and applies all database migrations.

The command `make up` runs the setup as a foreground service; `make "up -d"` runs it in detached mode.

### SSO

Log in as the InvenTree admin user. Under `Settings > Login Settings`, make sure to `Enable SSO`.
docker-compose.yml, new file, 152 lines
@@ -0,0 +1,152 @@
version: "3.8"

# Docker compose recipe for a production-ready InvenTree setup, with the following containers:
# - PostgreSQL as the database backend
# - gunicorn as the InvenTree web server
# - django-q as the InvenTree background worker process
# - nginx as a reverse proxy
# - redis as the cache manager (optional, disabled by default)

# ---------------------
# READ BEFORE STARTING!
# ---------------------

# -----------------------------
# Setting environment variables
# -----------------------------
# Shared environment variables should be stored in the .env file
# Changes made to this file are reflected across all containers!
#
# IMPORTANT NOTE:
# You should not have to change *anything* within this docker-compose.yml file!
# Instead, make any changes in the .env file!

# ------------------------
# InvenTree Image Versions
# ------------------------
# By default, this docker-compose script targets the STABLE version of InvenTree,
# image: inventree/inventree:stable
#
# To run the LATEST (development) version of InvenTree,
# change the INVENTREE_TAG variable (in the .env file) to "latest"
#
# Alternatively, you could target a specific tagged release version with (for example):
# INVENTREE_TAG=0.7.5
#

services:
    # Database service
    # Use PostgreSQL as the database backend
    inventree-db:
        container_name: inventree-db
        image: ${POSTGRES_IMAGE:?You must provide the 'POSTGRES_IMAGE' variable in the .env file}
        expose:
            - ${INVENTREE_DB_PORT:-5432}/tcp
        env_file:
            - .env
        environment:
            - PGDATA=/var/lib/postgresql/data/pgdb
            - POSTGRES_USER=${INVENTREE_DB_USER:?You must provide the 'INVENTREE_DB_USER' variable in the .env file}
            - POSTGRES_PASSWORD=${INVENTREE_DB_PASSWORD:?You must provide the 'INVENTREE_DB_PASSWORD' variable in the .env file}
            - POSTGRES_DB=${INVENTREE_DB_NAME:?You must provide the 'INVENTREE_DB_NAME' variable in the .env file}
        volumes:
            # Map 'data' volume such that postgres database is stored externally
            - inventree_data:/var/lib/postgresql/data/:z
        restart: unless-stopped

    # redis acts as database cache manager
    # only runs under the "redis" profile : https://docs.docker.com/compose/profiles/
    inventree-cache:
        container_name: inventree-cache
        image: ${REDIS_IMAGE:?You must provide the 'REDIS_IMAGE' variable in the .env file}
        depends_on:
            - inventree-db
        env_file:
            - .env
        profiles:
            - redis
        expose:
            - ${INVENTREE_CACHE_PORT:-6379}
        restart: always

    # InvenTree web server service
    # Uses gunicorn as the web server
    inventree-server:
        container_name: inventree-server
        # If you wish to specify a particular InvenTree version, do so here
        image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file}
        expose:
            - 8000
        env_file:
            - .env
        environment:
            INVENTREE_SOCIAL_PROVIDERS: |
                {
                    "keycloak": {
                        "SERVERS": [
                            {
                                "KEYCLOAK_URL": "${HKNG_KEYCLOAK_URL:?You must provide the 'HKNG_KEYCLOAK_URL' variable in the .env file}",
                                "KEYCLOAK_REALM": "${HKNG_KEYCLOAK_REALM:?You must provide the 'HKNG_KEYCLOAK_REALM' variable in the .env file}",
                                "APP": {
                                    "client_id": "${HKNG_KEYCLOAK_CLIENT_ID:?You must provide the 'HKNG_KEYCLOAK_CLIENT_ID' variable in the .env file}",
                                    "secret": "${HKNG_KEYCLOAK_CLIENT_SECRET:?You must provide the 'HKNG_KEYCLOAK_CLIENT_SECRET' variable in the .env file}"
                                }
                            }
                        ]
                    }
                }
        depends_on:
            - inventree-db
        volumes:
            # Data volume must map to /home/inventree/data
            - inventree_data:/home/inventree/data:z
            # ugly backport of 0.11.0 features, to be removed
            - ./patch/settings.py:/home/inventree/InvenTree/InvenTree/settings.py:ro,Z
            - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:ro,Z
        restart: unless-stopped

    # Background worker process handles long-running or periodic tasks
    inventree-worker:
        container_name: inventree-worker
        # If you wish to specify a particular InvenTree version, do so here
        image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file}
        command: invoke worker
        depends_on:
            - inventree-server
        env_file:
            - .env
        volumes:
            # Data volume must map to /home/inventree/data
            - inventree_data:/home/inventree/data:z
        restart: unless-stopped

    # nginx acts as a reverse proxy
    # static files are served directly by nginx
    # media files are served by nginx, although authentication is redirected to inventree-server
    # web requests are redirected to gunicorn
    # NOTE: You will need to provide a working nginx.conf file!
    inventree-proxy:
        container_name: inventree-proxy
        image: ${NGINX_IMAGE:?You must provide the 'NGINX_IMAGE' variable in the .env file}
        depends_on:
            - inventree-server
        ports:
            # Default web port is 1337 (can be changed in the .env file)
            - ${INVENTREE_WEB_PORT:-1337}:8080
        volumes:
            # Provide nginx configuration file to the container
            # Refer to the provided example file as a starting point
            - ./nginx.prod.conf:/etc/nginx/conf.d/default.conf:ro,Z
            # nginx proxy needs access to static and media files
            - inventree_data:/var/www:z
        restart: unless-stopped

volumes:
    # Persistent data, stored external to the container(s)
    inventree_data:
        driver: local
        driver_opts:
            type: none
            o: bind
            # This directory specifies where InvenTree data are stored "outside" the docker containers
            device: ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}
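The cache container is guarded by the "redis" profile, so it only starts when that profile is explicitly activated; the Makefile's catch-all target does not pass flags, so invoking compose directly is the simplest route. A possible invocation, with the cache host the server would then need (adding INVENTREE_CACHE_HOST to .env is an assumption about how this deployment would wire it; the compose file itself does not set it):

    docker compose --profile redis up -d
    # in .env (assumption): INVENTREE_CACHE_HOST=inventree-cache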
nginx.prod.conf, new file, 64 lines
@@ -0,0 +1,64 @@
server {

    # Listen for connection on (internal) port 8080 (unprivileged nginx)
    listen 8080;

    real_ip_header proxy_protocol;

    location / {

        proxy_set_header Host $http_host;
        proxy_set_header X-Forwarded-By $server_addr:$server_port;
        proxy_set_header X-Forwarded-For $remote_addr;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header CLIENT_IP $remote_addr;

        proxy_pass_request_headers on;

        proxy_redirect off;

        client_max_body_size 100M;

        proxy_buffering off;
        proxy_request_buffering off;

        # Change 'inventree-server' to the name of the inventree server container,
        # and '8000' to the INVENTREE_WEB_PORT (if not default)
        proxy_pass http://inventree-server:8000;
    }

    # Redirect any requests for static files
    location /static/ {
        alias /var/www/static/;
        autoindex on;

        # Caching settings
        expires 30d;
        add_header Pragma public;
        add_header Cache-Control "public";
    }

    # Redirect any requests for media files
    location /media/ {
        alias /var/www/media/;

        # Media files require user authentication
        auth_request /auth;

        # Content header to force download
        add_header Content-disposition "attachment";
    }

    # Use the 'user' API endpoint for auth
    location /auth {
        internal;

        proxy_pass http://inventree-server:8000/auth/;

        proxy_pass_request_body off;
        proxy_set_header Content-Length "";
        proxy_set_header X-Original-URI $request_uri;
    }

}
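Once the stack is up, a quick smoke test against the proxy confirms the nginx-to-gunicorn chain (1337 is the compose default for INVENTREE_WEB_PORT; the exact status codes depend on the login configuration and on whether static files have been collected):

    curl -I http://localhost:1337/
    curl -I http://localhost:1337/static/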
patch/config.py, new file, 347 lines
@@ -0,0 +1,347 @@
|
||||||
|
"""Helper functions for loading InvenTree configuration options."""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import shutil
|
||||||
|
import string
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
logger = logging.getLogger('inventree')
|
||||||
|
CONFIG_DATA = None
|
||||||
|
CONFIG_LOOKUPS = {}
|
||||||
|
|
||||||
|
|
||||||
|
def to_list(value, delimiter=','):
|
||||||
|
"""Take a configuration setting and make sure it is a list.
|
||||||
|
|
||||||
|
For example, we might have a configuration setting taken from the .config file,
|
||||||
|
which is already a list.
|
||||||
|
|
||||||
|
However, the same setting may be specified via an environment variable,
|
||||||
|
using a comma delimited string!
|
||||||
|
"""
|
||||||
|
|
||||||
|
if type(value) in [list, tuple]:
|
||||||
|
return value
|
||||||
|
|
||||||
|
# Otherwise, force string value
|
||||||
|
value = str(value)
|
||||||
|
|
||||||
|
return [x.strip() for x in value.split(delimiter)]
|
||||||
|
|
||||||
|
|
||||||
|
def to_dict(value):
|
||||||
|
"""Take a configuration setting and make sure it is a dict.
|
||||||
|
|
||||||
|
For example, we might have a configuration setting taken from the .config file,
|
||||||
|
which is already an object/dict.
|
||||||
|
|
||||||
|
However, the same setting may be specified via an environment variable,
|
||||||
|
using a valid JSON string!
|
||||||
|
"""
|
||||||
|
if value is None:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
if type(value) == dict:
|
||||||
|
return value
|
||||||
|
|
||||||
|
try:
|
||||||
|
return json.loads(value)
|
||||||
|
except Exception as error:
|
||||||
|
logger.error(f"Failed to parse value '{value}' as JSON with error {error}. Ensure value is a valid JSON string.")
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
def is_true(x):
|
||||||
|
"""Shortcut function to determine if a value "looks" like a boolean"""
|
||||||
|
return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on']
|
||||||
|
|
||||||
|
|
||||||
|
def get_base_dir() -> Path:
|
||||||
|
"""Returns the base (top-level) InvenTree directory."""
|
||||||
|
return Path(__file__).parent.parent.resolve()
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_dir(path: Path) -> None:
|
||||||
|
"""Ensure that a directory exists.
|
||||||
|
|
||||||
|
If it does not exist, create it.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not path.exists():
|
||||||
|
path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
def get_config_file(create=True) -> Path:
|
||||||
|
"""Returns the path of the InvenTree configuration file.
|
||||||
|
|
||||||
|
Note: It will be created if it does not already exist!
|
||||||
|
"""
|
||||||
|
base_dir = get_base_dir()
|
||||||
|
|
||||||
|
cfg_filename = os.getenv('INVENTREE_CONFIG_FILE')
|
||||||
|
|
||||||
|
if cfg_filename:
|
||||||
|
cfg_filename = Path(cfg_filename.strip()).resolve()
|
||||||
|
else:
|
||||||
|
# Config file is *not* specified - use the default
|
||||||
|
cfg_filename = base_dir.joinpath('config.yaml').resolve()
|
||||||
|
|
||||||
|
if not cfg_filename.exists() and create:
|
||||||
|
print("InvenTree configuration file 'config.yaml' not found - creating default file")
|
||||||
|
ensure_dir(cfg_filename.parent)
|
||||||
|
|
||||||
|
cfg_template = base_dir.joinpath("config_template.yaml")
|
||||||
|
shutil.copyfile(cfg_template, cfg_filename)
|
||||||
|
print(f"Created config file {cfg_filename}")
|
||||||
|
|
||||||
|
return cfg_filename
|
||||||
|
|
||||||
|
|
||||||
|
def load_config_data(set_cache: bool = False) -> map:
|
||||||
|
"""Load configuration data from the config file.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
set_cache(bool): If True, the configuration data will be cached for future use after load.
|
||||||
|
"""
|
||||||
|
global CONFIG_DATA
|
||||||
|
|
||||||
|
# use cache if populated
|
||||||
|
# skip cache if cache should be set
|
||||||
|
if CONFIG_DATA is not None and not set_cache:
|
||||||
|
return CONFIG_DATA
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
cfg_file = get_config_file()
|
||||||
|
|
||||||
|
with open(cfg_file, 'r') as cfg:
|
||||||
|
data = yaml.safe_load(cfg)
|
||||||
|
|
||||||
|
# Set the cache if requested
|
||||||
|
if set_cache:
|
||||||
|
CONFIG_DATA = data
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
def get_setting(env_var=None, config_key=None, default_value=None, typecast=None):
|
||||||
|
"""Helper function for retrieving a configuration setting value.
|
||||||
|
|
||||||
|
- First preference is to look for the environment variable
|
||||||
|
- Second preference is to look for the value of the settings file
|
||||||
|
- Third preference is the default value
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT'
|
||||||
|
config_key: Key to lookup in the configuration file
|
||||||
|
default_value: Value to return if first two options are not provided
|
||||||
|
typecast: Function to use for typecasting the value
|
||||||
|
"""
|
||||||
|
def try_typecasting(value, source: str):
|
||||||
|
"""Attempt to typecast the value"""
|
||||||
|
|
||||||
|
# Force 'list' of strings
|
||||||
|
if typecast is list:
|
||||||
|
value = to_list(value)
|
||||||
|
|
||||||
|
# Valid JSON string is required
|
||||||
|
elif typecast is dict:
|
||||||
|
value = to_dict(value)
|
||||||
|
|
||||||
|
elif typecast is not None:
|
||||||
|
# Try to typecast the value
|
||||||
|
try:
|
||||||
|
val = typecast(value)
|
||||||
|
set_metadata(source)
|
||||||
|
return val
|
||||||
|
except Exception as error:
|
||||||
|
logger.error(f"Failed to typecast '{env_var}' with value '{value}' to type '{typecast}' with error {error}")
|
||||||
|
|
||||||
|
set_metadata(source)
|
||||||
|
return value
|
||||||
|
|
||||||
|
def set_metadata(source: str):
|
||||||
|
"""Set lookup metadata for the setting."""
|
||||||
|
key = env_var or config_key
|
||||||
|
CONFIG_LOOKUPS[key] = {'env_var': env_var, 'config_key': config_key, 'source': source, 'accessed': datetime.datetime.now()}
|
||||||
|
|
||||||
|
# First, try to load from the environment variables
|
||||||
|
if env_var is not None:
|
||||||
|
val = os.getenv(env_var, None)
|
||||||
|
|
||||||
|
if val is not None:
|
||||||
|
return try_typecasting(val, 'env')
|
||||||
|
|
||||||
|
# Next, try to load from configuration file
|
||||||
|
if config_key is not None:
|
||||||
|
cfg_data = load_config_data()
|
||||||
|
|
||||||
|
result = None
|
||||||
|
|
||||||
|
# Hack to allow 'path traversal' in configuration file
|
||||||
|
for key in config_key.strip().split('.'):
|
||||||
|
|
||||||
|
if type(cfg_data) is not dict or key not in cfg_data:
|
||||||
|
result = None
|
||||||
|
break
|
||||||
|
|
||||||
|
result = cfg_data[key]
|
||||||
|
cfg_data = cfg_data[key]
|
||||||
|
|
||||||
|
if result is not None:
|
||||||
|
return try_typecasting(result, 'yaml')
|
||||||
|
|
||||||
|
# Finally, return the default value
|
||||||
|
return try_typecasting(default_value, 'default')
|
||||||
|
|
||||||
|
|
||||||
|
def get_boolean_setting(env_var=None, config_key=None, default_value=False):
|
||||||
|
"""Helper function for retreiving a boolean configuration setting"""
|
||||||
|
|
||||||
|
return is_true(get_setting(env_var, config_key, default_value))
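For illustration, the resolution order implemented above (environment variable, then config.yaml key, then default) means one call covers all three sources; the setting name in this sketch is made up:

    # Checks INVENTREE_EXAMPLE_TIMEOUT, then 'example.timeout' in config.yaml, then falls back to 30
    example_timeout = get_setting('INVENTREE_EXAMPLE_TIMEOUT', 'example.timeout', 30, typecast=int)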
|
||||||
|
|
||||||
|
|
||||||
|
def get_media_dir(create=True):
|
||||||
|
"""Return the absolute path for the 'media' directory (where uploaded files are stored)"""
|
||||||
|
|
||||||
|
md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root')
|
||||||
|
|
||||||
|
if not md:
|
||||||
|
raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified')
|
||||||
|
|
||||||
|
md = Path(md).resolve()
|
||||||
|
|
||||||
|
if create:
|
||||||
|
md.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
return md
|
||||||
|
|
||||||
|
|
||||||
|
def get_static_dir(create=True):
|
||||||
|
"""Return the absolute path for the 'static' directory (where static files are stored)"""
|
||||||
|
|
||||||
|
sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root')
|
||||||
|
|
||||||
|
if not sd:
|
||||||
|
raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified')
|
||||||
|
|
||||||
|
sd = Path(sd).resolve()
|
||||||
|
|
||||||
|
if create:
|
||||||
|
sd.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
return sd
|
||||||
|
|
||||||
|
|
||||||
|
def get_backup_dir(create=True):
|
||||||
|
"""Return the absolute path for the backup directory"""
|
||||||
|
|
||||||
|
bd = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir')
|
||||||
|
|
||||||
|
if not bd:
|
||||||
|
raise FileNotFoundError('INVENTREE_BACKUP_DIR not specified')
|
||||||
|
|
||||||
|
bd = Path(bd).resolve()
|
||||||
|
|
||||||
|
if create:
|
||||||
|
bd.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
return bd
|
||||||
|
|
||||||
|
|
||||||
|
def get_plugin_file():
|
||||||
|
"""Returns the path of the InvenTree plugins specification file.
|
||||||
|
|
||||||
|
Note: It will be created if it does not already exist!
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Check if the plugin.txt file (specifying required plugins) is specified
|
||||||
|
plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file')
|
||||||
|
|
||||||
|
if not plugin_file:
|
||||||
|
# If not specified, look in the same directory as the configuration file
|
||||||
|
config_dir = get_config_file().parent
|
||||||
|
plugin_file = config_dir.joinpath('plugins.txt')
|
||||||
|
else:
|
||||||
|
# Make sure we are using a modern Path object
|
||||||
|
plugin_file = Path(plugin_file)
|
||||||
|
|
||||||
|
if not plugin_file.exists():
|
||||||
|
logger.warning("Plugin configuration file does not exist - creating default file")
|
||||||
|
logger.info(f"Creating plugin file at '{plugin_file}'")
|
||||||
|
ensure_dir(plugin_file.parent)
|
||||||
|
|
||||||
|
# If opening the file fails (no write permission, for example), then this will throw an error
|
||||||
|
plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n")
|
||||||
|
|
||||||
|
return plugin_file
|
||||||
|
|
||||||
|
|
||||||
|
def get_secret_key():
|
||||||
|
"""Return the secret key value which will be used by django.
|
||||||
|
|
||||||
|
Following options are tested, in descending order of preference:
|
||||||
|
|
||||||
|
A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data
|
||||||
|
B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file
|
||||||
|
C) Look for default key file "secret_key.txt"
|
||||||
|
D) Create "secret_key.txt" if it does not exist
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Look for environment variable
|
||||||
|
if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
|
||||||
|
logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover
|
||||||
|
return secret_key
|
||||||
|
|
||||||
|
# Look for secret key file
|
||||||
|
if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'):
|
||||||
|
secret_key_file = Path(secret_key_file).resolve()
|
||||||
|
else:
|
||||||
|
# Default location for secret key file
|
||||||
|
secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve()
|
||||||
|
|
||||||
|
if not secret_key_file.exists():
|
||||||
|
logger.info(f"Generating random key file at '{secret_key_file}'")
|
||||||
|
ensure_dir(secret_key_file.parent)
|
||||||
|
|
||||||
|
# Create a random key file
|
||||||
|
options = string.digits + string.ascii_letters + string.punctuation
|
||||||
|
key = ''.join([random.choice(options) for i in range(100)])
|
||||||
|
secret_key_file.write_text(key)
|
||||||
|
|
||||||
|
logger.info(f"Loading SECRET_KEY from '{secret_key_file}'")
|
||||||
|
|
||||||
|
key_data = secret_key_file.read_text().strip()
|
||||||
|
|
||||||
|
return key_data
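In a containerised deployment this means the key can be pinned from outside; a hypothetical `.env` entry (the path is illustrative, any file readable inside the container works):

    INVENTREE_SECRET_KEY_FILE=/home/inventree/data/secret_key.txt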
|
||||||
|
|
||||||
|
|
||||||
|
def get_custom_file(env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False):
|
||||||
|
"""Returns the checked path to a custom file.
|
||||||
|
|
||||||
|
Set lookup_media to True to also search in the media folder.
|
||||||
|
"""
|
||||||
|
from django.contrib.staticfiles.storage import StaticFilesStorage
|
||||||
|
from django.core.files.storage import default_storage
|
||||||
|
|
||||||
|
value = get_setting(env_ref, conf_ref, None)
|
||||||
|
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
|
||||||
|
static_storage = StaticFilesStorage()
|
||||||
|
|
||||||
|
if static_storage.exists(value):
|
||||||
|
logger.info(f"Loading {log_ref} from static directory: {value}")
|
||||||
|
elif lookup_media and default_storage.exists(value):
|
||||||
|
logger.info(f"Loading {log_ref} from media directory: {value}")
|
||||||
|
else:
|
||||||
|
add_dir_str = ' or media' if lookup_media else ''
|
||||||
|
logger.warning(f"The {log_ref} file '{value}' could not be found in the static{add_dir_str} directories")
|
||||||
|
value = False
|
||||||
|
|
||||||
|
return value
|
patch/settings.py, new file, 914 lines
@@ -0,0 +1,914 @@
|
||||||
|
"""Django settings for InvenTree project.
|
||||||
|
|
||||||
|
In practice the settings in this file should not be adjusted,
|
||||||
|
instead settings can be configured in the config.yaml file
|
||||||
|
located in the top level project directory.
|
||||||
|
|
||||||
|
This allows implementation configuration to be hidden from source control,
|
||||||
|
as well as separating configuration parameters from the more complex
|
||||||
|
database setup in this file.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import django.conf.locale
|
||||||
|
import django.core.exceptions
|
||||||
|
from django.http import Http404
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
|
import moneyed
|
||||||
|
import sentry_sdk
|
||||||
|
from sentry_sdk.integrations.django import DjangoIntegration
|
||||||
|
|
||||||
|
from . import config
|
||||||
|
from .config import get_boolean_setting, get_custom_file, get_setting
|
||||||
|
|
||||||
|
INVENTREE_NEWS_URL = 'https://inventree.org/news/feed.atom'
|
||||||
|
|
||||||
|
# Determine if we are running in "test" mode e.g. "manage.py test"
|
||||||
|
TESTING = 'test' in sys.argv
|
||||||
|
|
||||||
|
if TESTING:
|
||||||
|
|
||||||
|
# Use a weaker password hasher for testing (improves testing speed)
|
||||||
|
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher',]
|
||||||
|
|
||||||
|
# Enable slow-test-runner
|
||||||
|
TEST_RUNNER = 'django_slowtests.testrunner.DiscoverSlowestTestsRunner'
|
||||||
|
NUM_SLOW_TESTS = 25
|
||||||
|
|
||||||
|
# Note: The following fix is "required" for docker build workflow
|
||||||
|
# Note: 2022-12-12 still unsure why...
|
||||||
|
if os.getenv('INVENTREE_DOCKER'):
|
||||||
|
# Ensure that sys.path includes global python libs
|
||||||
|
site_packages = '/usr/local/lib/python3.9/site-packages'
|
||||||
|
|
||||||
|
if site_packages not in sys.path:
|
||||||
|
print("Adding missing site-packages path:", site_packages)
|
||||||
|
sys.path.append(site_packages)
|
||||||
|
|
||||||
|
# Are environment variables manipulated by tests? Needs to be set by testing code
|
||||||
|
TESTING_ENV = False
|
||||||
|
|
||||||
|
# New requirement for django 3.2+
|
||||||
|
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
|
||||||
|
|
||||||
|
# Build paths inside the project like this: BASE_DIR.joinpath(...)
|
||||||
|
BASE_DIR = config.get_base_dir()
|
||||||
|
|
||||||
|
# Load configuration data
|
||||||
|
CONFIG = config.load_config_data(set_cache=True)
|
||||||
|
|
||||||
|
# Default action is to run the system in Debug mode
|
||||||
|
# SECURITY WARNING: don't run with debug turned on in production!
|
||||||
|
DEBUG = get_boolean_setting('INVENTREE_DEBUG', 'debug', True)
|
||||||
|
|
||||||
|
# Configure logging settings
|
||||||
|
log_level = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING')
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=log_level,
|
||||||
|
format="%(asctime)s %(levelname)s %(message)s",
|
||||||
|
)
|
||||||
|
|
||||||
|
if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
|
||||||
|
log_level = 'WARNING' # pragma: no cover
|
||||||
|
|
||||||
|
LOGGING = {
|
||||||
|
'version': 1,
|
||||||
|
'disable_existing_loggers': False,
|
||||||
|
'handlers': {
|
||||||
|
'console': {
|
||||||
|
'class': 'logging.StreamHandler',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'root': {
|
||||||
|
'handlers': ['console'],
|
||||||
|
'level': log_level,
|
||||||
|
},
|
||||||
|
'filters': {
|
||||||
|
'require_not_maintenance_mode_503': {
|
||||||
|
'()': 'maintenance_mode.logging.RequireNotMaintenanceMode503',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
# Get a logger instance for this setup file
|
||||||
|
logger = logging.getLogger("inventree")
|
||||||
|
|
||||||
|
# Load SECRET_KEY
|
||||||
|
SECRET_KEY = config.get_secret_key()
|
||||||
|
|
||||||
|
# The filesystem location for served static files
|
||||||
|
STATIC_ROOT = config.get_static_dir()
|
||||||
|
|
||||||
|
# The filesystem location for uploaded media files
|
||||||
|
MEDIA_ROOT = config.get_media_dir()
|
||||||
|
|
||||||
|
# List of allowed hosts (default = allow all)
|
||||||
|
ALLOWED_HOSTS = get_setting(
|
||||||
|
"INVENTREE_ALLOWED_HOSTS",
|
||||||
|
config_key='allowed_hosts',
|
||||||
|
default_value=['*'],
|
||||||
|
typecast=list,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Cross Origin Resource Sharing (CORS) options
|
||||||
|
|
||||||
|
# Only allow CORS access to API
|
||||||
|
CORS_URLS_REGEX = r'^/api/.*$'
|
||||||
|
|
||||||
|
# Extract CORS options from configuration file
|
||||||
|
CORS_ORIGIN_ALLOW_ALL = get_boolean_setting(
|
||||||
|
"INVENTREE_CORS_ORIGIN_ALLOW_ALL",
|
||||||
|
config_key='cors.allow_all',
|
||||||
|
default_value=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
CORS_ORIGIN_WHITELIST = get_setting(
|
||||||
|
"INVENTREE_CORS_ORIGIN_WHITELIST",
|
||||||
|
config_key='cors.whitelist',
|
||||||
|
default_value=[],
|
||||||
|
typecast=list,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Needed for the parts importer, directly impacts the maximum parts that can be uploaded
|
||||||
|
DATA_UPLOAD_MAX_NUMBER_FIELDS = 10000
|
||||||
|
|
||||||
|
# Web URL endpoint for served static files
|
||||||
|
STATIC_URL = '/static/'
|
||||||
|
|
||||||
|
STATICFILES_DIRS = []
|
||||||
|
|
||||||
|
# Translated Template settings
|
||||||
|
STATICFILES_I18_PREFIX = 'i18n'
|
||||||
|
STATICFILES_I18_SRC = BASE_DIR.joinpath('templates', 'js', 'translated')
|
||||||
|
STATICFILES_I18_TRG = BASE_DIR.joinpath('InvenTree', 'static_i18n')
|
||||||
|
STATICFILES_DIRS.append(STATICFILES_I18_TRG)
|
||||||
|
STATICFILES_I18_TRG = STATICFILES_I18_TRG.joinpath(STATICFILES_I18_PREFIX)
|
||||||
|
|
||||||
|
STATFILES_I18_PROCESSORS = [
|
||||||
|
'InvenTree.context.status_codes',
|
||||||
|
]
|
||||||
|
|
||||||
|
# Color Themes Directory
|
||||||
|
STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes').resolve()
|
||||||
|
|
||||||
|
# Web URL endpoint for served media files
|
||||||
|
MEDIA_URL = '/media/'
|
||||||
|
|
||||||
|
# Database backup options
|
||||||
|
# Ref: https://django-dbbackup.readthedocs.io/en/master/configuration.html
|
||||||
|
DBBACKUP_SEND_EMAIL = False
|
||||||
|
DBBACKUP_STORAGE = get_setting(
|
||||||
|
'INVENTREE_BACKUP_STORAGE',
|
||||||
|
'backup_storage',
|
||||||
|
'django.core.files.storage.FileSystemStorage'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Default backup configuration
|
||||||
|
DBBACKUP_STORAGE_OPTIONS = get_setting('INVENTREE_BACKUP_OPTIONS', 'backup_options', None)
|
||||||
|
if DBBACKUP_STORAGE_OPTIONS is None:
|
||||||
|
DBBACKUP_STORAGE_OPTIONS = {
|
||||||
|
'location': config.get_backup_dir(),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Application definition
|
||||||
|
|
||||||
|
INSTALLED_APPS = [
|
||||||
|
# Admin site integration
|
||||||
|
'django.contrib.admin',
|
||||||
|
|
||||||
|
# InvenTree apps
|
||||||
|
'build.apps.BuildConfig',
|
||||||
|
'common.apps.CommonConfig',
|
||||||
|
'company.apps.CompanyConfig',
|
||||||
|
'label.apps.LabelConfig',
|
||||||
|
'order.apps.OrderConfig',
|
||||||
|
'part.apps.PartConfig',
|
||||||
|
'report.apps.ReportConfig',
|
||||||
|
'stock.apps.StockConfig',
|
||||||
|
'users.apps.UsersConfig',
|
||||||
|
'plugin.apps.PluginAppConfig',
|
||||||
|
'InvenTree.apps.InvenTreeConfig', # InvenTree app runs last
|
||||||
|
|
||||||
|
# Core django modules
|
||||||
|
'django.contrib.auth',
|
||||||
|
'django.contrib.contenttypes',
|
||||||
|
'user_sessions', # db user sessions
|
||||||
|
'django.contrib.messages',
|
||||||
|
'django.contrib.staticfiles',
|
||||||
|
'django.contrib.sites',
|
||||||
|
|
||||||
|
# Maintenance
|
||||||
|
'maintenance_mode',
|
||||||
|
|
||||||
|
# Third party add-ons
|
||||||
|
'django_filters', # Extended filter functionality
|
||||||
|
'rest_framework', # DRF (Django Rest Framework)
|
||||||
|
'rest_framework.authtoken', # Token authentication for API
|
||||||
|
'corsheaders', # Cross-origin Resource Sharing for DRF
|
||||||
|
'crispy_forms', # Improved form rendering
|
||||||
|
'import_export', # Import / export tables to file
|
||||||
|
'django_cleanup.apps.CleanupConfig', # Automatically delete orphaned MEDIA files
|
||||||
|
'mptt', # Modified Preorder Tree Traversal
|
||||||
|
'markdownify', # Markdown template rendering
|
||||||
|
'djmoney', # django-money integration
|
||||||
|
'djmoney.contrib.exchange', # django-money exchange rates
|
||||||
|
'error_report', # Error reporting in the admin interface
|
||||||
|
'django_q',
|
||||||
|
'formtools', # Form wizard tools
|
||||||
|
'dbbackup', # Backups - django-dbbackup
|
||||||
|
|
||||||
|
'allauth', # Base app for SSO
|
||||||
|
'allauth.account', # Extend user with accounts
|
||||||
|
'allauth.socialaccount', # Use 'social' providers
|
||||||
|
|
||||||
|
'django_otp', # OTP is needed for MFA - base package
|
||||||
|
'django_otp.plugins.otp_totp', # Time based OTP
|
||||||
|
'django_otp.plugins.otp_static', # Backup codes
|
||||||
|
|
||||||
|
'allauth_2fa', # MFA flow for allauth
|
||||||
|
|
||||||
|
'django_ical', # For exporting calendars
|
||||||
|
]
|
||||||
|
|
||||||
|
MIDDLEWARE = CONFIG.get('middleware', [
|
||||||
|
'django.middleware.security.SecurityMiddleware',
|
||||||
|
'x_forwarded_for.middleware.XForwardedForMiddleware',
|
||||||
|
'user_sessions.middleware.SessionMiddleware', # db user sessions
|
||||||
|
'django.middleware.locale.LocaleMiddleware',
|
||||||
|
'django.middleware.common.CommonMiddleware',
|
||||||
|
'django.middleware.csrf.CsrfViewMiddleware',
|
||||||
|
'corsheaders.middleware.CorsMiddleware',
|
||||||
|
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||||
|
'InvenTree.middleware.InvenTreeRemoteUserMiddleware', # Remote / proxy auth
|
||||||
|
'django_otp.middleware.OTPMiddleware', # MFA support
|
||||||
|
'InvenTree.middleware.CustomAllauthTwoFactorMiddleware', # Flow control for allauth
|
||||||
|
'django.contrib.messages.middleware.MessageMiddleware',
|
||||||
|
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||||
|
'InvenTree.middleware.AuthRequiredMiddleware',
|
||||||
|
'InvenTree.middleware.Check2FAMiddleware', # Check if the user should be forced to use MFA
|
||||||
|
'maintenance_mode.middleware.MaintenanceModeMiddleware',
|
||||||
|
'InvenTree.middleware.InvenTreeExceptionProcessor', # Error reporting
|
||||||
|
])
|
||||||
|
|
||||||
|
AUTHENTICATION_BACKENDS = CONFIG.get('authentication_backends', [
|
||||||
|
'django.contrib.auth.backends.RemoteUserBackend', # proxy login
|
||||||
|
'django.contrib.auth.backends.ModelBackend',
|
||||||
|
'allauth.account.auth_backends.AuthenticationBackend', # SSO login via external providers
|
||||||
|
])
|
||||||
|
|
||||||
|
DEBUG_TOOLBAR_ENABLED = DEBUG and get_setting('INVENTREE_DEBUG_TOOLBAR', 'debug_toolbar', False)
|
||||||
|
|
||||||
|
# If the debug toolbar is enabled, add the modules
|
||||||
|
if DEBUG_TOOLBAR_ENABLED: # pragma: no cover
|
||||||
|
logger.info("Running with DEBUG_TOOLBAR enabled")
|
||||||
|
INSTALLED_APPS.append('debug_toolbar')
|
||||||
|
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
|
||||||
|
|
||||||
|
DEBUG_TOOLBAR_CONFIG = {
|
||||||
|
'RESULTS_CACHE_SIZE': 100,
|
||||||
|
'OBSERVE_REQUEST_CALLBACK': lambda x: False,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Internal IP addresses allowed to see the debug toolbar
|
||||||
|
INTERNAL_IPS = [
|
||||||
|
'127.0.0.1',
|
||||||
|
]
|
||||||
|
|
||||||
|
# Internal flag to determine if we are running in docker mode
|
||||||
|
DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False)
|
||||||
|
|
||||||
|
if DOCKER: # pragma: no cover
|
||||||
|
# Internal IP addresses are different when running under docker
|
||||||
|
hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname())
|
||||||
|
INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"]
|
||||||
|
|
||||||
|
# Allow secure http developer server in debug mode
|
||||||
|
if DEBUG:
|
||||||
|
INSTALLED_APPS.append('sslserver')
|
||||||
|
|
||||||
|
# InvenTree URL configuration
|
||||||
|
|
||||||
|
# Base URL for admin pages (default="admin")
|
||||||
|
INVENTREE_ADMIN_URL = get_setting(
|
||||||
|
'INVENTREE_ADMIN_URL',
|
||||||
|
config_key='admin_url',
|
||||||
|
default_value='admin'
|
||||||
|
)
|
||||||
|
|
||||||
|
ROOT_URLCONF = 'InvenTree.urls'
|
||||||
|
|
||||||
|
TEMPLATES = [
|
||||||
|
{
|
||||||
|
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||||
|
'DIRS': [
|
||||||
|
BASE_DIR.joinpath('templates'),
|
||||||
|
# Allow templates in the reporting directory to be accessed
|
||||||
|
MEDIA_ROOT.joinpath('report'),
|
||||||
|
MEDIA_ROOT.joinpath('label'),
|
||||||
|
],
|
||||||
|
'OPTIONS': {
|
||||||
|
'context_processors': [
|
||||||
|
'django.template.context_processors.debug',
|
||||||
|
'django.template.context_processors.request',
|
||||||
|
'django.template.context_processors.i18n',
|
||||||
|
'django.contrib.auth.context_processors.auth',
|
||||||
|
'django.contrib.messages.context_processors.messages',
|
||||||
|
# Custom InvenTree context processors
|
||||||
|
'InvenTree.context.health_status',
|
||||||
|
'InvenTree.context.status_codes',
|
||||||
|
'InvenTree.context.user_roles',
|
||||||
|
],
|
||||||
|
'loaders': [(
|
||||||
|
'django.template.loaders.cached.Loader', [
|
||||||
|
'plugin.template.PluginTemplateLoader',
|
||||||
|
'django.template.loaders.filesystem.Loader',
|
||||||
|
'django.template.loaders.app_directories.Loader',
|
||||||
|
])
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
if DEBUG_TOOLBAR_ENABLED: # pragma: no cover
|
||||||
|
# Note that the APP_DIRS value must be set when using debug_toolbar
|
||||||
|
# But this will kill template loading for plugins
|
||||||
|
TEMPLATES[0]['APP_DIRS'] = True
|
||||||
|
del TEMPLATES[0]['OPTIONS']['loaders']
|
||||||
|
|
||||||
|
REST_FRAMEWORK = {
|
||||||
|
'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler',
|
||||||
|
'DATETIME_FORMAT': '%Y-%m-%d %H:%M',
|
||||||
|
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||||
|
'rest_framework.authentication.BasicAuthentication',
|
||||||
|
'rest_framework.authentication.SessionAuthentication',
|
||||||
|
'rest_framework.authentication.TokenAuthentication',
|
||||||
|
),
|
||||||
|
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
|
||||||
|
'DEFAULT_PERMISSION_CLASSES': (
|
||||||
|
'rest_framework.permissions.IsAuthenticated',
|
||||||
|
'rest_framework.permissions.DjangoModelPermissions',
|
||||||
|
'InvenTree.permissions.RolePermission',
|
||||||
|
),
|
||||||
|
'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema',
|
||||||
|
'DEFAULT_METADATA_CLASS': 'InvenTree.metadata.InvenTreeMetadata',
|
||||||
|
'DEFAULT_RENDERER_CLASSES': [
|
||||||
|
'rest_framework.renderers.JSONRenderer',
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
# Enable browsable API if in DEBUG mode
|
||||||
|
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'].append('rest_framework.renderers.BrowsableAPIRenderer')
|
||||||
|
|
||||||
|
WSGI_APPLICATION = 'InvenTree.wsgi.application'
|
||||||
|
|
||||||
|
"""
|
||||||
|
Configure the database backend based on the user-specified values.
|
||||||
|
|
||||||
|
- Primarily this configuration happens in the config.yaml file
|
||||||
|
- However there may be reason to configure the DB via environmental variables
|
||||||
|
- The following code lets the user "mix and match" database configuration
|
||||||
|
"""
|
||||||
|
|
||||||
|
logger.debug("Configuring database backend:")
|
||||||
|
|
||||||
|
# Extract database configuration from the config.yaml file
|
||||||
|
db_config = CONFIG.get('database', {})
|
||||||
|
|
||||||
|
if not db_config:
|
||||||
|
db_config = {}
|
||||||
|
|
||||||
|
# Environment variables take preference over config file!
|
||||||
|
|
||||||
|
db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']
|
||||||
|
|
||||||
|
for key in db_keys:
|
||||||
|
# First, check the environment variables
|
||||||
|
env_key = f"INVENTREE_DB_{key}"
|
||||||
|
env_var = os.environ.get(env_key, None)
|
||||||
|
|
||||||
|
if env_var:
|
||||||
|
# Make sure PORT is an int
|
||||||
|
if key == 'PORT':
|
||||||
|
try:
|
||||||
|
env_var = int(env_var)
|
||||||
|
except ValueError:
|
||||||
|
logger.error(f"Invalid number for {env_key}: {env_var}")
|
||||||
|
# Override configuration value
|
||||||
|
db_config[key] = env_var
|
||||||
|
|
||||||
|
# Check that required database configuration options are specified
|
||||||
|
required_keys = ['ENGINE', 'NAME']
|
||||||
|
|
||||||
|
for key in required_keys:
|
||||||
|
if key not in db_config: # pragma: no cover
|
||||||
|
error_msg = f'Missing required database configuration value {key}'
|
||||||
|
logger.error(error_msg)
|
||||||
|
|
||||||
|
print('Error: ' + error_msg)
|
||||||
|
sys.exit(-1)
|
||||||
|
|
||||||
|
"""
|
||||||
|
Special considerations for the database 'ENGINE' setting.
|
||||||
|
It can be specified in config.yaml (or envvar) as either (for example):
|
||||||
|
- sqlite3
|
||||||
|
- django.db.backends.sqlite3
|
||||||
|
- django.db.backends.postgresql
|
||||||
|
"""
|
||||||
|
|
||||||
|
db_engine = db_config['ENGINE'].lower()
|
||||||
|
|
||||||
|
# Correct common misspelling
|
||||||
|
if db_engine == 'sqlite':
|
||||||
|
db_engine = 'sqlite3' # pragma: no cover
|
||||||
|
|
||||||
|
if db_engine in ['sqlite3', 'postgresql', 'mysql']:
|
||||||
|
# Prepend the required python module string
|
||||||
|
db_engine = f'django.db.backends.{db_engine}'
|
||||||
|
db_config['ENGINE'] = db_engine
|
||||||
|
|
||||||
|
db_name = db_config['NAME']
|
||||||
|
db_host = db_config.get('HOST', "''")
|
||||||
|
|
||||||
|
if 'sqlite' in db_engine:
|
||||||
|
db_name = str(Path(db_name).resolve())
|
||||||
|
db_config['NAME'] = db_name
|
||||||
|
|
||||||
|
logger.info(f"DB_ENGINE: {db_engine}")
|
||||||
|
logger.info(f"DB_NAME: {db_name}")
|
||||||
|
logger.info(f"DB_HOST: {db_host}")
|
||||||
|
|
||||||
|
"""
|
||||||
|
In addition to base-level database configuration, we may wish to specify specific options to the database backend
|
||||||
|
Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS
|
||||||
|
"""
|
||||||
|
|
||||||
|
# 'OPTIONS' or 'options' can be specified in config.yaml
|
||||||
|
# Set useful sensible timeouts for a transactional webserver to communicate
|
||||||
|
# with its database server, that is, if the webserver is having issues
|
||||||
|
# connecting to the database server (such as a replica failover) don't sit and
|
||||||
|
# wait for possibly an hour or more, just tell the client something went wrong
|
||||||
|
# and let the client retry when they want to.
|
||||||
|
db_options = db_config.get("OPTIONS", db_config.get("options", {}))
|
||||||
|
|
||||||
|
# Specific options for postgres backend
|
||||||
|
if "postgres" in db_engine: # pragma: no cover
|
||||||
|
from psycopg2.extensions import (ISOLATION_LEVEL_READ_COMMITTED,
|
||||||
|
ISOLATION_LEVEL_SERIALIZABLE)
|
||||||
|
|
||||||
|
# Connection timeout
|
||||||
|
if "connect_timeout" not in db_options:
|
||||||
|
# The DB server is in the same data center, it should not take very
|
||||||
|
# long to connect to the database server
|
||||||
|
# seconds, 2 is the minimum allowed by libpq
|
||||||
|
db_options["connect_timeout"] = int(
|
||||||
|
get_setting('INVENTREE_DB_TIMEOUT', 'database.timeout', 2)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Setup TCP keepalive
|
||||||
|
# DB server is in the same DC, it should not become unresponsive for
|
||||||
|
# very long. With the defaults below we wait 5 seconds for the network
|
||||||
|
# issue to resolve itself. If that doesn't happen, whatever happened
|
||||||
|
# is probably fatal and no amount of waiting is going to fix it.
|
||||||
|
# # 0 - TCP Keepalives disabled; 1 - enabled
|
||||||
|
if "keepalives" not in db_options:
|
||||||
|
db_options["keepalives"] = int(
|
||||||
|
get_setting('INVENTREE_DB_TCP_KEEPALIVES', 'database.tcp_keepalives', 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Seconds after connection is idle to send keep alive
|
||||||
|
if "keepalives_idle" not in db_options:
|
||||||
|
db_options["keepalives_idle"] = int(
|
||||||
|
get_setting('INVENTREE_DB_TCP_KEEPALIVES_IDLE', 'database.tcp_keepalives_idle', 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Seconds after missing ACK to send another keep alive
|
||||||
|
if "keepalives_interval" not in db_options:
|
||||||
|
db_options["keepalives_interval"] = int(
|
||||||
|
get_setting("INVENTREE_DB_TCP_KEEPALIVES_INTERVAL", "database.tcp_keepalives_internal", "1")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Number of missing ACKs before we close the connection
|
||||||
|
if "keepalives_count" not in db_options:
|
||||||
|
db_options["keepalives_count"] = int(
|
||||||
|
get_setting("INVENTREE_DB_TCP_KEEPALIVES_COUNT", "database.tcp_keepalives_count", "5")
|
||||||
|
)
|
||||||
|
|
||||||
|
# # Milliseconds for how long pending data should remain unacked
|
||||||
|
# by the remote server
|
||||||
|
# TODO: Supported starting in PSQL 11
|
||||||
|
# "tcp_user_timeout": int(os.getenv("PGTCP_USER_TIMEOUT", "1000"),
|
||||||
|
|
||||||
|
# Postgres's default isolation level is Read Committed which is
|
||||||
|
# normally fine, but most developers think the database server is
|
||||||
|
# actually going to do Serializable type checks on the queries to
|
||||||
|
# protect against simultaneous changes.
|
||||||
|
# https://www.postgresql.org/docs/devel/transaction-iso.html
|
||||||
|
# https://docs.djangoproject.com/en/3.2/ref/databases/#isolation-level
|
||||||
|
if "isolation_level" not in db_options:
|
||||||
|
serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False)
|
||||||
|
db_options["isolation_level"] = ISOLATION_LEVEL_SERIALIZABLE if serializable else ISOLATION_LEVEL_READ_COMMITTED
|
||||||
|
|
||||||
|
# Specific options for MySql / MariaDB backend
|
||||||
|
elif "mysql" in db_engine: # pragma: no cover
|
||||||
|
# TODO TCP time outs and keepalives
|
||||||
|
|
||||||
|
# MariaDB's default isolation level is Repeatable Read which is
|
||||||
|
# normally fine, but most developers think the database server is
|
||||||
|
# actually going to do Serializable type checks on the queries to
|
||||||
|
# protect against simultaneous changes.
|
||||||
|
# https://mariadb.com/kb/en/mariadb-transactions-and-isolation-levels-for-sql-server-users/#changing-the-isolation-level
|
||||||
|
# https://docs.djangoproject.com/en/3.2/ref/databases/#mysql-isolation-level
|
||||||
|
if "isolation_level" not in db_options:
|
||||||
|
serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False)
|
||||||
|
db_options["isolation_level"] = "serializable" if serializable else "read committed"
|
||||||
|
|
||||||
|
# Specific options for sqlite backend
|
||||||
|
elif "sqlite" in db_engine:
|
||||||
|
# TODO: Verify timeouts are not an issue because no network is involved for SQLite
|
||||||
|
|
||||||
|
# SQLite's default isolation level is Serializable due to SQLite's
|
||||||
|
# single writer implementation. Presumably as a result of this, it is
|
||||||
|
# not possible to implement any lower isolation levels in SQLite.
|
||||||
|
# https://www.sqlite.org/isolation.html
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Provide OPTIONS dict back to the database configuration dict
|
||||||
|
db_config['OPTIONS'] = db_options
|
||||||
|
|
||||||
|
# Set testing options for the database
|
||||||
|
db_config['TEST'] = {
|
||||||
|
'CHARSET': 'utf8',
|
||||||
|
}
|
||||||
|
|
||||||
|
# Set collation option for mysql test database
|
||||||
|
if 'mysql' in db_engine:
|
||||||
|
db_config['TEST']['COLLATION'] = 'utf8_general_ci' # pragma: no cover
|
||||||
|
|
||||||
|
DATABASES = {
|
||||||
|
'default': db_config
|
||||||
|
}
|
||||||
|
|
||||||
|
# login settings
|
||||||
|
REMOTE_LOGIN = get_boolean_setting('INVENTREE_REMOTE_LOGIN', 'remote_login_enabled', False)
|
||||||
|
REMOTE_LOGIN_HEADER = get_setting('INVENTREE_REMOTE_LOGIN_HEADER', 'remote_login_header', 'REMOTE_USER')
|
||||||
|
|
||||||
|
# sentry.io integration for error reporting
|
||||||
|
SENTRY_ENABLED = get_boolean_setting('INVENTREE_SENTRY_ENABLED', 'sentry_enabled', False)
|
||||||
|
# Default Sentry DSN (can be overriden if user wants custom sentry integration)
|
||||||
|
INVENTREE_DSN = 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600'
|
||||||
|
SENTRY_DSN = get_setting('INVENTREE_SENTRY_DSN', 'sentry_dsn', INVENTREE_DSN)
|
||||||
|
SENTRY_SAMPLE_RATE = float(get_setting('INVENTREE_SENTRY_SAMPLE_RATE', 'sentry_sample_rate', 0.1))
|
||||||
|
|
||||||
|
if SENTRY_ENABLED and SENTRY_DSN: # pragma: no cover
|
||||||
|
sentry_sdk.init(
|
||||||
|
dsn=SENTRY_DSN,
|
||||||
|
integrations=[DjangoIntegration(), ],
|
||||||
|
traces_sample_rate=1.0 if DEBUG else SENTRY_SAMPLE_RATE,
|
||||||
|
send_default_pii=True
|
||||||
|
)
|
||||||
|
inventree_tags = {
|
||||||
|
'testing': TESTING,
|
||||||
|
'docker': DOCKER,
|
||||||
|
'debug': DEBUG,
|
||||||
|
'remote': REMOTE_LOGIN,
|
||||||
|
}
|
||||||
|
for key, val in inventree_tags.items():
|
||||||
|
sentry_sdk.set_tag(f'inventree_{key}', val)
|
||||||
|
|
||||||
|
# Cache configuration
|
||||||
|
cache_host = get_setting('INVENTREE_CACHE_HOST', 'cache.host', None)
|
||||||
|
cache_port = get_setting('INVENTREE_CACHE_PORT', 'cache.port', '6379', typecast=int)
|
||||||
|
|
||||||
|
if cache_host: # pragma: no cover
|
||||||
|
# We are going to rely upon a possibly non-localhost for our cache,
|
||||||
|
# so don't wait too long for the cache as nothing in the cache should be
|
||||||
|
# irreplaceable.
|
||||||
|
_cache_options = {
|
||||||
|
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||||
|
"SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")),
|
||||||
|
"SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")),
|
||||||
|
"CONNECTION_POOL_KWARGS": {
|
||||||
|
"socket_keepalive": config.is_true(
|
||||||
|
os.getenv("CACHE_TCP_KEEPALIVE", "1")
|
||||||
|
),
|
||||||
|
"socket_keepalive_options": {
|
||||||
|
socket.TCP_KEEPCNT: int(
|
||||||
|
os.getenv("CACHE_KEEPALIVES_COUNT", "5")
|
||||||
|
),
|
||||||
|
socket.TCP_KEEPIDLE: int(
|
||||||
|
os.getenv("CACHE_KEEPALIVES_IDLE", "1")
|
||||||
|
),
|
||||||
|
socket.TCP_KEEPINTVL: int(
|
||||||
|
os.getenv("CACHE_KEEPALIVES_INTERVAL", "1")
|
||||||
|
),
|
||||||
|
socket.TCP_USER_TIMEOUT: int(
|
||||||
|
os.getenv("CACHE_TCP_USER_TIMEOUT", "1000")
|
||||||
|
),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
CACHES = {
|
||||||
|
"default": {
|
||||||
|
"BACKEND": "django_redis.cache.RedisCache",
|
||||||
|
"LOCATION": f"redis://{cache_host}:{cache_port}/0",
|
||||||
|
"OPTIONS": _cache_options,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
CACHES = {
|
||||||
|
"default": {
|
||||||
|
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
_q_worker_timeout = int(get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90))
|
||||||
|
|
||||||
|
# django-q background worker configuration
|
||||||
|
Q_CLUSTER = {
|
||||||
|
'name': 'InvenTree',
|
||||||
|
'label': 'Background Tasks',
|
||||||
|
'workers': int(get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4)),
|
||||||
|
'timeout': _q_worker_timeout,
|
||||||
|
'retry': min(120, _q_worker_timeout + 30),
|
||||||
|
'max_attempts': int(get_setting('INVENTREE_BACKGROUND_MAX_ATTEMPTS', 'background.max_attempts', 5)),
|
||||||
|
'queue_limit': 50,
|
||||||
|
'catch_up': False,
|
||||||
|
'bulk': 10,
|
||||||
|
'orm': 'default',
|
||||||
|
'cache': 'default',
|
||||||
|
'sync': False,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Configure django-q sentry integration
|
||||||
|
if SENTRY_ENABLED and SENTRY_DSN:
|
||||||
|
Q_CLUSTER['error_reporter'] = {
|
||||||
|
'sentry': {
|
||||||
|
'dsn': SENTRY_DSN
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if cache_host: # pragma: no cover
|
||||||
|
# If using external redis cache, make the cache the broker for Django Q
|
||||||
|
# as well
|
||||||
|
Q_CLUSTER["django_redis"] = "worker"
|
||||||
|
|
||||||
|
# database user sessions
|
||||||
|
SESSION_ENGINE = 'user_sessions.backends.db'
|
||||||
|
LOGOUT_REDIRECT_URL = get_setting('INVENTREE_LOGOUT_REDIRECT_URL', 'logout_redirect_url', 'index')
|
||||||
|
SILENCED_SYSTEM_CHECKS = [
|
||||||
|
'admin.E410',
|
||||||
|
]
|
||||||
|
|
||||||
|
# Password validation
|
||||||
|
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
|
||||||
|
|
||||||
|
AUTH_PASSWORD_VALIDATORS = [
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Extra (optional) URL validators
|
||||||
|
# See https://docs.djangoproject.com/en/2.2/ref/validators/#django.core.validators.URLValidator
|
||||||
|
|
||||||
|
EXTRA_URL_SCHEMES = get_setting('INVENTREE_EXTRA_URL_SCHEMES', 'extra_url_schemes', [])
|
||||||
|
|
||||||
|
if type(EXTRA_URL_SCHEMES) not in [list]: # pragma: no cover
|
||||||
|
logger.warning("extra_url_schemes not correctly formatted")
|
||||||
|
EXTRA_URL_SCHEMES = []
|
||||||
|
|
||||||
|
# Internationalization
|
||||||
|
# https://docs.djangoproject.com/en/dev/topics/i18n/
|
||||||
|
LANGUAGE_CODE = get_setting('INVENTREE_LANGUAGE', 'language', 'en-us')
|
||||||
|
# Store language settings for 30 days
|
||||||
|
LANGUAGE_COOKIE_AGE = 2592000

# If a new language translation is supported, it must be added here
LANGUAGES = [
    ('cs', _('Czech')),
    ('da', _('Danish')),
    ('de', _('German')),
    ('el', _('Greek')),
    ('en', _('English')),
    ('es', _('Spanish')),
    ('es-mx', _('Spanish (Mexican)')),
    ('fa', _('Farsi / Persian')),
    ('fr', _('French')),
    ('he', _('Hebrew')),
    ('hu', _('Hungarian')),
    ('it', _('Italian')),
    ('ja', _('Japanese')),
    ('ko', _('Korean')),
    ('nl', _('Dutch')),
    ('no', _('Norwegian')),
    ('pl', _('Polish')),
    ('pt', _('Portuguese')),
    ('pt-BR', _('Portuguese (Brazilian)')),
    ('ru', _('Russian')),
    ('sl', _('Slovenian')),
    ('sv', _('Swedish')),
    ('th', _('Thai')),
    ('tr', _('Turkish')),
    ('vi', _('Vietnamese')),
    ('zh-hans', _('Chinese')),
]

# Testing interface translations
if get_boolean_setting('TEST_TRANSLATIONS', default_value=False):  # pragma: no cover
    # Set default language
    LANGUAGE_CODE = 'xx'

    # Add to language catalog
    LANGUAGES.append(('xx', 'Test'))

    # Add custom languages not provided by Django
    EXTRA_LANG_INFO = {
        'xx': {
            'code': 'xx',
            'name': 'Test',
            'name_local': 'Test'
        },
    }
    LANG_INFO = dict(django.conf.locale.LANG_INFO, **EXTRA_LANG_INFO)
    django.conf.locale.LANG_INFO = LANG_INFO

# Currencies available for use
CURRENCIES = get_setting(
    'INVENTREE_CURRENCIES', 'currencies',
    ['AUD', 'CAD', 'CNY', 'EUR', 'GBP', 'JPY', 'NZD', 'USD'],
    typecast=list,
)

# Maximum number of decimal places for currency rendering
CURRENCY_DECIMAL_PLACES = 6

# Check that each provided currency is supported
for currency in CURRENCIES:
    if currency not in moneyed.CURRENCIES:  # pragma: no cover
        logger.error(f"Currency code '{currency}' is not supported")
        sys.exit(1)
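
# Illustrative only: to restrict the instance to a subset of currencies, the
# INVENTREE_CURRENCIES variable might be set to a comma-separated list such as
#   INVENTREE_CURRENCIES=EUR,USD,GBP
# Every code must appear in moneyed.CURRENCIES, otherwise startup aborts as above.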

# Custom currency exchange backend
EXCHANGE_BACKEND = 'InvenTree.exchange.InvenTreeExchange'

# Email configuration options
EMAIL_BACKEND = get_setting('INVENTREE_EMAIL_BACKEND', 'email.backend', 'django.core.mail.backends.smtp.EmailBackend')
EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host', '')
EMAIL_PORT = get_setting('INVENTREE_EMAIL_PORT', 'email.port', 25, typecast=int)
EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username', '')
EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password', '')
EMAIL_SUBJECT_PREFIX = get_setting('INVENTREE_EMAIL_PREFIX', 'email.prefix', '[InvenTree] ')
EMAIL_USE_TLS = get_boolean_setting('INVENTREE_EMAIL_TLS', 'email.tls', False)
EMAIL_USE_SSL = get_boolean_setting('INVENTREE_EMAIL_SSL', 'email.ssl', False)

DEFAULT_FROM_EMAIL = get_setting('INVENTREE_EMAIL_SENDER', 'email.sender', '')

EMAIL_USE_LOCALTIME = False
EMAIL_TIMEOUT = 60
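
# Illustrative only: outbound mail is not configured in this deployment's sample.env.
# A hypothetical SMTP setup would add environment variables along these lines (the
# variable names come from the settings above, the values are placeholders):
#   INVENTREE_EMAIL_HOST=smtp.example.com
#   INVENTREE_EMAIL_PORT=587
#   INVENTREE_EMAIL_USERNAME=inventree@example.com
#   INVENTREE_EMAIL_PASSWORD=change-me
#   INVENTREE_EMAIL_TLS=True
#   INVENTREE_EMAIL_SENDER=inventree@example.com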

LOCALE_PATHS = (
    BASE_DIR.joinpath('locale/'),
)

TIME_ZONE = get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC')

USE_I18N = True

USE_L10N = True

# Do not use native timezone support in "test" mode
# It generates a *lot* of cruft in the logs
if not TESTING:
    USE_TZ = True  # pragma: no cover

DATE_INPUT_FORMATS = [
    "%Y-%m-%d",
]

# crispy forms use the bootstrap templates
CRISPY_TEMPLATE_PACK = 'bootstrap4'

# Use database transactions when importing / exporting data
IMPORT_EXPORT_USE_TRANSACTIONS = True

SITE_ID = 1

# Load the allauth social backends
SOCIAL_BACKENDS = get_setting('INVENTREE_SOCIAL_BACKENDS', 'social_backends', [], typecast=list)

for app in SOCIAL_BACKENDS:
    INSTALLED_APPS.append(app)  # pragma: no cover

SOCIALACCOUNT_PROVIDERS = get_setting('INVENTREE_SOCIAL_PROVIDERS', 'social_providers', None, typecast=dict)

SOCIALACCOUNT_STORE_TOKENS = True
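
# Illustrative only: when the keycloak backend from sample.env is enabled, a provider
# entry (supplied via INVENTREE_SOCIAL_PROVIDERS or the 'social_providers' config key)
# might look roughly like the sketch below. The exact keys are an assumption based on
# django-allauth's keycloak provider; the URL and realm values are placeholders:
#   SOCIALACCOUNT_PROVIDERS = {
#       'keycloak': {
#           'KEYCLOAK_URL': 'https://keycloak.example.com/auth',
#           'KEYCLOAK_REALM': 'master',
#       },
#   }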

# settings for allauth
ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = get_setting('INVENTREE_LOGIN_CONFIRM_DAYS', 'login_confirm_days', 3, typecast=int)
ACCOUNT_LOGIN_ATTEMPTS_LIMIT = get_setting('INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5, typecast=int)
ACCOUNT_DEFAULT_HTTP_PROTOCOL = get_setting('INVENTREE_LOGIN_DEFAULT_HTTP_PROTOCOL', 'login_default_protocol', 'http')
ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True
ACCOUNT_PREVENT_ENUMERATION = True

# override forms / adapters
ACCOUNT_FORMS = {
    'login': 'allauth.account.forms.LoginForm',
    'signup': 'InvenTree.forms.CustomSignupForm',
    'add_email': 'allauth.account.forms.AddEmailForm',
    'change_password': 'allauth.account.forms.ChangePasswordForm',
    'set_password': 'allauth.account.forms.SetPasswordForm',
    'reset_password': 'allauth.account.forms.ResetPasswordForm',
    'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm',
    'disconnect': 'allauth.socialaccount.forms.DisconnectForm',
}

SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter'
ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter'

# Markdownify configuration
# Ref: https://django-markdownify.readthedocs.io/en/latest/settings.html

MARKDOWNIFY = {
    'default': {
        'BLEACH': True,
        'WHITELIST_ATTRS': [
            'href',
            'src',
            'alt',
        ],
        'MARKDOWN_EXTENSIONS': [
            'markdown.extensions.extra'
        ],
        'WHITELIST_TAGS': [
            'a',
            'abbr',
            'b',
            'blockquote',
            'em',
            'h1', 'h2', 'h3',
            'i',
            'img',
            'li',
            'ol',
            'p',
            'strong',
            'ul',
            'table',
            'thead',
            'tbody',
            'th',
            'tr',
            'td'
        ],
    }
}

# Ignore these error types for in-database error logging
IGNORED_ERRORS = [
    Http404,
    django.core.exceptions.PermissionDenied,
]

# Maintenance mode
MAINTENANCE_MODE_RETRY_AFTER = 60
MAINTENANCE_MODE_STATE_BACKEND = 'maintenance_mode.backends.StaticStorageBackend'

# Are plugins enabled?
PLUGINS_ENABLED = get_boolean_setting('INVENTREE_PLUGINS_ENABLED', 'plugins_enabled', False)

PLUGIN_FILE = config.get_plugin_file()

# Plugin test settings
PLUGIN_TESTING = get_setting('INVENTREE_PLUGIN_TESTING', 'PLUGIN_TESTING', TESTING)  # Are plugins being tested?
PLUGIN_TESTING_SETUP = get_setting('INVENTREE_PLUGIN_TESTING_SETUP', 'PLUGIN_TESTING_SETUP', False)  # Load plugins from setup hooks in testing?
PLUGIN_TESTING_EVENTS = False  # Flag if events are tested right now
PLUGIN_RETRY = get_setting('INVENTREE_PLUGIN_RETRY', 'PLUGIN_RETRY', 5)  # How often should plugin loading be tried?
PLUGIN_FILE_CHECKED = False  # Was the plugin file checked?

# User interface customization values
CUSTOM_LOGO = get_custom_file('INVENTREE_CUSTOM_LOGO', 'customize.logo', 'custom logo', lookup_media=True)
CUSTOM_SPLASH = get_custom_file('INVENTREE_CUSTOM_SPLASH', 'customize.splash', 'custom splash')

CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {})
if DEBUG:
    logger.info("InvenTree running with DEBUG enabled")

logger.info(f"MEDIA_ROOT: '{MEDIA_ROOT}'")
logger.info(f"STATIC_ROOT: '{STATIC_ROOT}'")

68
sample.env
Normal file
@@ -0,0 +1,68 @@
# InvenTree environment variables for a PostgreSQL production setup
COMPOSE_PROJECT_NAME=inventree

# Location of persistent database data (stored external to the docker containers)
# Note: You *must* un-comment this line, and point it to a path on your local machine

# e.g. Linux
INVENTREE_EXT_VOLUME=data

# e.g. Windows (docker desktop)
#INVENTREE_EXT_VOLUME=c:/Users/me/inventree-data

# Default web port for the InvenTree server
INVENTREE_WEB_PORT=8080

# Ensure debug is false for a production setup
INVENTREE_DEBUG=False
INVENTREE_LOG_LEVEL=WARNING

# Database configuration options
# Note: The example setup is for a PostgreSQL database
INVENTREE_DB_ENGINE=postgresql
INVENTREE_DB_NAME=inventree
INVENTREE_DB_HOST=inventree-db
INVENTREE_DB_PORT=5432

# Redis cache setup (disabled by default)
# Un-comment the following lines to enable the Redis cache
# Note that you will also have to run docker compose with the --profile redis option
# Refer to settings.py for other cache options
#INVENTREE_CACHE_HOST=inventree-cache
#INVENTREE_CACHE_PORT=6379
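
# Illustrative only: one way to start the stack with the redis profile enabled
# (matching the compose command used elsewhere in this deployment) would be:
#   docker compose --profile redis up -d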

# Options for gunicorn server
INVENTREE_GUNICORN_TIMEOUT=30

# Enable custom plugins?
INVENTREE_PLUGINS_ENABLED=False

# Image tags that should be used
INVENTREE_IMAGE=inventree/inventree:0.10.1
REDIS_IMAGE=redis:7.0-alpine
NGINX_IMAGE=nginxinc/nginx-unprivileged:stable-alpine
# Postgres image must match the pg_dump version in the inventree image
POSTGRES_IMAGE=postgres:13-alpine

# InvenTree admin account details
# Make sure to use secure credentials; these lines are used to auto-create an admin account
INVENTREE_ADMIN_USER=admin
INVENTREE_ADMIN_PASSWORD=password
INVENTREE_ADMIN_EMAIL=admin@inventree.example

# Database credentials - These must be configured before running
# Change from the default values!
INVENTREE_DB_USER=inventree
INVENTREE_DB_PASSWORD=password

# Django configuration
INVENTREE_SECRET_KEY=some-secret-key
ALLOWED_HOSTS=inventree.example.com,www.inventree.example.com
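# Illustrative only: one way to generate a strong value for INVENTREE_SECRET_KEY
# (and for the passwords above) before first start:
#   python3 -c "import secrets; print(secrets.token_urlsafe(50))"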

# SSO Config
INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.keycloak

HKNG_KEYCLOAK_URL=https://keycloak.example.com
HKNG_KEYCLOAK_REALM=master
HKNG_KEYCLOAK_CLIENT_ID=example-client
HKNG_KEYCLOAK_CLIENT_SECRET=example-secret