From 16e3c3fb31ff32a4f5376beff34ea510ce87e09f Mon Sep 17 00:00:00 2001 From: philipp Date: Sat, 18 Mar 2023 12:37:16 +0100 Subject: [PATCH 01/24] initial dump of deployment files --- .gitignore | 2 + Makefile | 24 ++ README.md | 22 ++ docker-compose.yml | 152 ++++++++ nginx.prod.conf | 64 ++++ patch/config.py | 347 +++++++++++++++++ patch/settings.py | 914 +++++++++++++++++++++++++++++++++++++++++++++ sample.env | 68 ++++ 8 files changed, 1593 insertions(+) create mode 100644 .gitignore create mode 100644 Makefile create mode 100644 README.md create mode 100644 docker-compose.yml create mode 100644 nginx.prod.conf create mode 100644 patch/config.py create mode 100644 patch/settings.py create mode 100644 sample.env diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6ee0103 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/data +/.env diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..4abc95c --- /dev/null +++ b/Makefile @@ -0,0 +1,24 @@ +CONFIG_FILE = conf/config.env +SECRET_FILE = conf/secrets.env + +COMPOSE_CMD ?= docker compose +# use unix:///run/docker.sock for docker socket, unix://${XDG_RUNTIME_DIR}/podman/podman.sock for podman +DOCKER_HOST ?= unix:///run/docker.sock +COMPOSE = DOCKER_HOST=$(DOCKER_HOST) $(COMPOSE_CMD) + +psql: + $(COMPOSE) exec inventree-db sh -c 'psql $$POSTGRES_USER $$POSTGRES_DB' + +update: + read -p "Update will cause downtime of the server. Are you sure you want to continue? Press Ctrl+c to abort!" _ + $(COMPOSE) down + $(COMPOSE) pull + $(COMPOSE) run inventree-server invoke update + $(COMPOSE) up -d + +data: # podman does not autocreate data folder + mkdir data + +# pass all commands to compose cli +%: data + $(COMPOSE) $@ diff --git a/README.md b/README.md new file mode 100644 index 0000000..598fec6 --- /dev/null +++ b/README.md @@ -0,0 +1,22 @@ +# InvenTree Deployment + +These are the deployment files required to get InvenTree up and running. 
InvenTree is deployed as a `docker compose` setup and therefore has the following dependencies:
+
+- Podman/Docker
+- Docker Compose
+- Make (as script runner)
+
+## Configuration
+
+Copy the `sample.env` into a file named `.env` and make sure to adapt all values to your needs, especially secrets.
+
+## Installation
+
+In order to invoke an update or to complete the first setup, `make update` is used to pull the latest images and apply all database migrations.
+
+The command `make up` can be used to run the setup as a foreground service, `make "up -d"` can be used to run the setup in detached mode.
+
+### SSO
+
+Login as InvenTree admin user. Under `Settings > Login Settings` make sure to `Enable SSO`.
+
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..c7957b7
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,152 @@
+version: "3.8"
+
+# Docker compose recipe for a production-ready InvenTree setup, with the following containers:
+# - PostgreSQL as the database backend
+# - gunicorn as the InvenTree web server
+# - django-q as the InvenTree background worker process
+# - nginx as a reverse proxy
+# - redis as the cache manager (optional, disabled by default)
+
+# ---------------------
+# READ BEFORE STARTING!
+# ---------------------
+
+# -----------------------------
+# Setting environment variables
+# -----------------------------
+# Shared environment variables should be stored in the .env file
+# Changes made to this file are reflected across all containers!
+#
+# IMPORTANT NOTE:
+# You should not have to change *anything* within this docker-compose.yml file!
+# Instead, make any changes in the .env file!
+ +# ------------------------ +# InvenTree Image Versions +# ------------------------ +# By default, this docker-compose script targets the STABLE version of InvenTree, +# image: inventree/inventree:stable +# +# To run the LATEST (development) version of InvenTree, +# change the INVENTREE_TAG variable (in the .env file) to "latest" +# +# Alternatively, you could target a specific tagged release version with (for example): +# INVENTREE_TAG=0.7.5 +# + +services: + # Database service + # Use PostgreSQL as the database backend + inventree-db: + container_name: inventree-db + image: ${POSTGRES_IMAGE:?You must provide the 'POSTGRES_IMAGE' variable in the .env file} + expose: + - ${INVENTREE_DB_PORT:-5432}/tcp + env_file: + - .env + environment: + - PGDATA=/var/lib/postgresql/data/pgdb + - POSTGRES_USER=${INVENTREE_DB_USER:?You must provide the 'INVENTREE_DB_USER' variable in the .env file} + - POSTGRES_PASSWORD=${INVENTREE_DB_PASSWORD:?You must provide the 'INVENTREE_DB_PASSWORD' variable in the .env file} + - POSTGRES_DB=${INVENTREE_DB_NAME:?You must provide the 'INVENTREE_DB_NAME' variable in the .env file} + volumes: + # Map 'data' volume such that postgres database is stored externally + - inventree_data:/var/lib/postgresql/data/:z + restart: unless-stopped + + # redis acts as database cache manager + # only runs under the "redis" profile : https://docs.docker.com/compose/profiles/ + inventree-cache: + container_name: inventree-cache + image: ${REDIS_IMAGE:?You must provide the 'REDIS_IMAGE' variable in the .env file} + depends_on: + - inventree-db + env_file: + - .env + profiles: + - redis + expose: + - ${INVENTREE_CACHE_PORT:-6379} + restart: always + + # InvenTree web server service + # Uses gunicorn as the web server + inventree-server: + container_name: inventree-server + # If you wish to specify a particular InvenTree version, do so here + image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file} + expose: + - 8000 + env_file: 
- .env
+    environment:
+      # JSON blob consumed by the patched settings.py to configure django-allauth
+      # Keycloak SSO; all values are required at compose time (:? expansion aborts
+      # startup with a message if the .env entry is missing).
+      INVENTREE_SOCIAL_PROVIDERS: |
+        {
+          "keycloak": {
+            "SERVERS": [
+              {
+                "KEYCLOAK_URL": "${HKNG_KEYCLOAK_URL:?You must provide the 'HKNG_KEYCLOAK_URL' variable in the .env file}",
+                "KEYCLOAK_REALM": "${HKNG_KEYCLOAK_REALM:?You must provide the 'HKNG_KEYCLOAK_REALM' variable in the .env file}",
+                "APP": {
+                  "client_id": "${HKNG_KEYCLOAK_CLIENT_ID:?You must provide the 'HKNG_KEYCLOAK_CLIENT_ID' variable in the .env file}",
+                  "secret": "${HKNG_KEYCLOAK_CLIENT_SECRET:?You must provide the 'HKNG_KEYCLOAK_CLIENT_SECRET' variable in the .env file}"
+                }
+              }
+            ]
+          }
+        }
+    depends_on:
+      - inventree-db
+    volumes:
+      # Data volume must map to /home/inventree/data
+      - inventree_data:/home/inventree/data:z
+      # ugly backport of 0.11.0 features, to be removed
+      - ./patch/settings.py:/home/inventree/InvenTree/InvenTree/settings.py:ro,Z
+      # FIX: was ':zo,Z' — 'zo' is not a valid mount option and aborts container
+      # startup; read-only + SELinux private relabel is ':ro,Z', matching the
+      # settings.py mount above.
+      - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:ro,Z
+    restart: unless-stopped
+
+  # Background worker process handles long-running or periodic tasks
+  inventree-worker:
+    container_name: inventree-worker
+    # If you wish to specify a particular InvenTree version, do so here
+    image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file}
+    command: invoke worker
+    depends_on:
+      - inventree-server
+    env_file:
+      - .env
+    volumes:
+      # Data volume must map to /home/inventree/data
+      - inventree_data:/home/inventree/data:z
+    restart: unless-stopped
+
+  # nginx acts as a reverse proxy
+  # static files are served directly by nginx
+  # media files are served by nginx, although authentication is redirected to inventree-server
+  # web requests are redirected to gunicorn
+  # NOTE: You will need to provide a working nginx.conf file!
+ inventree-proxy: + container_name: inventree-proxy + image: ${NGINX_IMAGE:?You must provide the 'NGINX_IMAGE' variable in the .env file} + depends_on: + - inventree-server + ports: + # Default web port is 1337 (can be changed in the .env file) + - ${INVENTREE_WEB_PORT:-1337}:8080 + volumes: + # Provide nginx configuration file to the container + # Refer to the provided example file as a starting point + - ./nginx.prod.conf:/etc/nginx/conf.d/default.conf:ro,Z + # nginx proxy needs access to static and media files + - inventree_data:/var/www:z + restart: unless-stopped + +volumes: + # Persistent data, stored external to the container(s) + inventree_data: + driver: local + driver_opts: + type: none + o: bind + # This directory specified where InvenTree data are stored "outside" the docker containers + device: ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!} diff --git a/nginx.prod.conf b/nginx.prod.conf new file mode 100644 index 0000000..1ebdcd2 --- /dev/null +++ b/nginx.prod.conf @@ -0,0 +1,64 @@ +server { + + # Listen for connection on (internal) port 8080 (unprivileged nginx) + listen 8080; + + real_ip_header proxy_protocol; + + location / { + + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-By $server_addr:$server_port; + proxy_set_header X-Forwarded-For $remote_addr; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header CLIENT_IP $remote_addr; + + proxy_pass_request_headers on; + + proxy_redirect off; + + client_max_body_size 100M; + + proxy_buffering off; + proxy_request_buffering off; + + # Change 'inventree-server' to the name of the inventree server container, + # and '8000' to the INVENTREE_WEB_PORT (if not default) + proxy_pass http://inventree-server:8000; + } + + # Redirect any requests for static files + location /static/ { + alias /var/www/static/; + autoindex on; + + # Caching settings + expires 30d; + add_header Pragma public; + 
add_header Cache-Control "public"; + } + + # Redirect any requests for media files + location /media/ { + alias /var/www/media/; + + # Media files require user authentication + auth_request /auth; + + # Content header to force download + add_header Content-disposition "attachment"; + } + + # Use the 'user' API endpoint for auth + location /auth { + internal; + + proxy_pass http://inventree-server:8000/auth/; + + proxy_pass_request_body off; + proxy_set_header Content-Length ""; + proxy_set_header X-Original-URI $request_uri; + } + +} diff --git a/patch/config.py b/patch/config.py new file mode 100644 index 0000000..720e7bd --- /dev/null +++ b/patch/config.py @@ -0,0 +1,347 @@ +"""Helper functions for loading InvenTree configuration options.""" + +import datetime +import json +import logging +import os +import random +import shutil +import string +from pathlib import Path + +logger = logging.getLogger('inventree') +CONFIG_DATA = None +CONFIG_LOOKUPS = {} + + +def to_list(value, delimiter=','): + """Take a configuration setting and make sure it is a list. + + For example, we might have a configuration setting taken from the .config file, + which is already a list. + + However, the same setting may be specified via an environment variable, + using a comma delimited string! + """ + + if type(value) in [list, tuple]: + return value + + # Otherwise, force string value + value = str(value) + + return [x.strip() for x in value.split(delimiter)] + + +def to_dict(value): + """Take a configuration setting and make sure it is a dict. + + For example, we might have a configuration setting taken from the .config file, + which is already an object/dict. + + However, the same setting may be specified via an environment variable, + using a valid JSON string! + """ + if value is None: + return {} + + if type(value) == dict: + return value + + try: + return json.loads(value) + except Exception as error: + logger.error(f"Failed to parse value '{value}' as JSON with error {error}. 
Ensure value is a valid JSON string.") + return {} + + +def is_true(x): + """Shortcut function to determine if a value "looks" like a boolean""" + return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on'] + + +def get_base_dir() -> Path: + """Returns the base (top-level) InvenTree directory.""" + return Path(__file__).parent.parent.resolve() + + +def ensure_dir(path: Path) -> None: + """Ensure that a directory exists. + + If it does not exist, create it. + """ + + if not path.exists(): + path.mkdir(parents=True, exist_ok=True) + + +def get_config_file(create=True) -> Path: + """Returns the path of the InvenTree configuration file. + + Note: It will be created it if does not already exist! + """ + base_dir = get_base_dir() + + cfg_filename = os.getenv('INVENTREE_CONFIG_FILE') + + if cfg_filename: + cfg_filename = Path(cfg_filename.strip()).resolve() + else: + # Config file is *not* specified - use the default + cfg_filename = base_dir.joinpath('config.yaml').resolve() + + if not cfg_filename.exists() and create: + print("InvenTree configuration file 'config.yaml' not found - creating default file") + ensure_dir(cfg_filename.parent) + + cfg_template = base_dir.joinpath("config_template.yaml") + shutil.copyfile(cfg_template, cfg_filename) + print(f"Created config file {cfg_filename}") + + return cfg_filename + + +def load_config_data(set_cache: bool = False) -> map: + """Load configuration data from the config file. + + Arguments: + set_cache(bool): If True, the configuration data will be cached for future use after load. 
+ """ + global CONFIG_DATA + + # use cache if populated + # skip cache if cache should be set + if CONFIG_DATA is not None and not set_cache: + return CONFIG_DATA + + import yaml + + cfg_file = get_config_file() + + with open(cfg_file, 'r') as cfg: + data = yaml.safe_load(cfg) + + # Set the cache if requested + if set_cache: + CONFIG_DATA = data + + return data + + +def get_setting(env_var=None, config_key=None, default_value=None, typecast=None): + """Helper function for retrieving a configuration setting value. + + - First preference is to look for the environment variable + - Second preference is to look for the value of the settings file + - Third preference is the default value + + Arguments: + env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT' + config_key: Key to lookup in the configuration file + default_value: Value to return if first two options are not provided + typecast: Function to use for typecasting the value + """ + def try_typecasting(value, source: str): + """Attempt to typecast the value""" + + # Force 'list' of strings + if typecast is list: + value = to_list(value) + + # Valid JSON string is required + elif typecast is dict: + value = to_dict(value) + + elif typecast is not None: + # Try to typecast the value + try: + val = typecast(value) + set_metadata(source) + return val + except Exception as error: + logger.error(f"Failed to typecast '{env_var}' with value '{value}' to type '{typecast}' with error {error}") + + set_metadata(source) + return value + + def set_metadata(source: str): + """Set lookup metadata for the setting.""" + key = env_var or config_key + CONFIG_LOOKUPS[key] = {'env_var': env_var, 'config_key': config_key, 'source': source, 'accessed': datetime.datetime.now()} + + # First, try to load from the environment variables + if env_var is not None: + val = os.getenv(env_var, None) + + if val is not None: + return try_typecasting(val, 'env') + + # Next, try to load from configuration file + if config_key is 
not None: + cfg_data = load_config_data() + + result = None + + # Hack to allow 'path traversal' in configuration file + for key in config_key.strip().split('.'): + + if type(cfg_data) is not dict or key not in cfg_data: + result = None + break + + result = cfg_data[key] + cfg_data = cfg_data[key] + + if result is not None: + return try_typecasting(result, 'yaml') + + # Finally, return the default value + return try_typecasting(default_value, 'default') + + +def get_boolean_setting(env_var=None, config_key=None, default_value=False): + """Helper function for retreiving a boolean configuration setting""" + + return is_true(get_setting(env_var, config_key, default_value)) + + +def get_media_dir(create=True): + """Return the absolute path for the 'media' directory (where uploaded files are stored)""" + + md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root') + + if not md: + raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified') + + md = Path(md).resolve() + + if create: + md.mkdir(parents=True, exist_ok=True) + + return md + + +def get_static_dir(create=True): + """Return the absolute path for the 'static' directory (where static files are stored)""" + + sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root') + + if not sd: + raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified') + + sd = Path(sd).resolve() + + if create: + sd.mkdir(parents=True, exist_ok=True) + + return sd + + +def get_backup_dir(create=True): + """Return the absolute path for the backup directory""" + + bd = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir') + + if not bd: + raise FileNotFoundError('INVENTREE_BACKUP_DIR not specified') + + bd = Path(bd).resolve() + + if create: + bd.mkdir(parents=True, exist_ok=True) + + return bd + + +def get_plugin_file(): + """Returns the path of the InvenTree plugins specification file. + + Note: It will be created if it does not already exist! 
+ """ + + # Check if the plugin.txt file (specifying required plugins) is specified + plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file') + + if not plugin_file: + # If not specified, look in the same directory as the configuration file + config_dir = get_config_file().parent + plugin_file = config_dir.joinpath('plugins.txt') + else: + # Make sure we are using a modern Path object + plugin_file = Path(plugin_file) + + if not plugin_file.exists(): + logger.warning("Plugin configuration file does not exist - creating default file") + logger.info(f"Creating plugin file at '{plugin_file}'") + ensure_dir(plugin_file.parent) + + # If opening the file fails (no write permission, for example), then this will throw an error + plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n") + + return plugin_file + + +def get_secret_key(): + """Return the secret key value which will be used by django. + + Following options are tested, in descending order of preference: + + A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data + B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file + C) Look for default key file "secret_key.txt" + D) Create "secret_key.txt" if it does not exist + """ + + # Look for environment variable + if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'): + logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover + return secret_key + + # Look for secret key file + if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'): + secret_key_file = Path(secret_key_file).resolve() + else: + # Default location for secret key file + secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve() + + if not secret_key_file.exists(): + logger.info(f"Generating random key file at '{secret_key_file}'") + ensure_dir(secret_key_file.parent) + + # Create a random key file + options = string.digits + string.ascii_letters + 
string.punctuation + key = ''.join([random.choice(options) for i in range(100)]) + secret_key_file.write_text(key) + + logger.info(f"Loading SECRET_KEY from '{secret_key_file}'") + + key_data = secret_key_file.read_text().strip() + + return key_data + + +def get_custom_file(env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False): + """Returns the checked path to a custom file. + + Set lookup_media to True to also search in the media folder. + """ + from django.contrib.staticfiles.storage import StaticFilesStorage + from django.core.files.storage import default_storage + + value = get_setting(env_ref, conf_ref, None) + + if not value: + return None + + static_storage = StaticFilesStorage() + + if static_storage.exists(value): + logger.info(f"Loading {log_ref} from static directory: {value}") + elif lookup_media and default_storage.exists(value): + logger.info(f"Loading {log_ref} from media directory: {value}") + else: + add_dir_str = ' or media' if lookup_media else '' + logger.warning(f"The {log_ref} file '{value}' could not be found in the static{add_dir_str} directories") + value = False + + return value diff --git a/patch/settings.py b/patch/settings.py new file mode 100644 index 0000000..1aab68d --- /dev/null +++ b/patch/settings.py @@ -0,0 +1,914 @@ +"""Django settings for InvenTree project. + +In practice the settings in this file should not be adjusted, +instead settings can be configured in the config.yaml file +located in the top level project directory. + +This allows implementation configuration to be hidden from source control, +as well as separate configuration parameters from the more complex +database setup in this file. 
+""" + +import logging +import os +import socket +import sys +from pathlib import Path + +import django.conf.locale +import django.core.exceptions +from django.http import Http404 +from django.utils.translation import gettext_lazy as _ + +import moneyed +import sentry_sdk +from sentry_sdk.integrations.django import DjangoIntegration + +from . import config +from .config import get_boolean_setting, get_custom_file, get_setting + +INVENTREE_NEWS_URL = 'https://inventree.org/news/feed.atom' + +# Determine if we are running in "test" mode e.g. "manage.py test" +TESTING = 'test' in sys.argv + +if TESTING: + + # Use a weaker password hasher for testing (improves testing speed) + PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher',] + + # Enable slow-test-runner + TEST_RUNNER = 'django_slowtests.testrunner.DiscoverSlowestTestsRunner' + NUM_SLOW_TESTS = 25 + + # Note: The following fix is "required" for docker build workflow + # Note: 2022-12-12 still unsure why... + if os.getenv('INVENTREE_DOCKER'): + # Ensure that sys.path includes global python libs + site_packages = '/usr/local/lib/python3.9/site-packages' + + if site_packages not in sys.path: + print("Adding missing site-packages path:", site_packages) + sys.path.append(site_packages) + +# Are environment variables manipulated by tests? Needs to be set by testing code +TESTING_ENV = False + +# New requirement for django 3.2+ +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + +# Build paths inside the project like this: BASE_DIR.joinpath(...) +BASE_DIR = config.get_base_dir() + +# Load configuration data +CONFIG = config.load_config_data(set_cache=True) + +# Default action is to run the system in Debug mode +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = get_boolean_setting('INVENTREE_DEBUG', 'debug', True) + +# Configure logging settings +log_level = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING') + +logging.basicConfig( + level=log_level, + format="%(asctime)s %(levelname)s %(message)s", +) + +if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']: + log_level = 'WARNING' # pragma: no cover + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + }, + }, + 'root': { + 'handlers': ['console'], + 'level': log_level, + }, + 'filters': { + 'require_not_maintenance_mode_503': { + '()': 'maintenance_mode.logging.RequireNotMaintenanceMode503', + }, + }, +} + +# Get a logger instance for this setup file +logger = logging.getLogger("inventree") + +# Load SECRET_KEY +SECRET_KEY = config.get_secret_key() + +# The filesystem location for served static files +STATIC_ROOT = config.get_static_dir() + +# The filesystem location for uploaded meadia files +MEDIA_ROOT = config.get_media_dir() + +# List of allowed hosts (default = allow all) +ALLOWED_HOSTS = get_setting( + "INVENTREE_ALLOWED_HOSTS", + config_key='allowed_hosts', + default_value=['*'], + typecast=list, +) + +# Cross Origin Resource Sharing (CORS) options + +# Only allow CORS access to API +CORS_URLS_REGEX = r'^/api/.*$' + +# Extract CORS options from configuration file +CORS_ORIGIN_ALLOW_ALL = get_boolean_setting( + "INVENTREE_CORS_ORIGIN_ALLOW_ALL", + config_key='cors.allow_all', + default_value=False, +) + +CORS_ORIGIN_WHITELIST = get_setting( + "INVENTREE_CORS_ORIGIN_WHITELIST", + config_key='cors.whitelist', + default_value=[], + typecast=list, +) + +# Needed for the parts importer, directly impacts the maximum parts that can be uploaded +DATA_UPLOAD_MAX_NUMBER_FIELDS = 10000 + +# Web URL endpoint for served static files +STATIC_URL = '/static/' + +STATICFILES_DIRS = [] + +# Translated Template settings +STATICFILES_I18_PREFIX = 'i18n' 
+STATICFILES_I18_SRC = BASE_DIR.joinpath('templates', 'js', 'translated') +STATICFILES_I18_TRG = BASE_DIR.joinpath('InvenTree', 'static_i18n') +STATICFILES_DIRS.append(STATICFILES_I18_TRG) +STATICFILES_I18_TRG = STATICFILES_I18_TRG.joinpath(STATICFILES_I18_PREFIX) + +STATFILES_I18_PROCESSORS = [ + 'InvenTree.context.status_codes', +] + +# Color Themes Directory +STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes').resolve() + +# Web URL endpoint for served media files +MEDIA_URL = '/media/' + +# Database backup options +# Ref: https://django-dbbackup.readthedocs.io/en/master/configuration.html +DBBACKUP_SEND_EMAIL = False +DBBACKUP_STORAGE = get_setting( + 'INVENTREE_BACKUP_STORAGE', + 'backup_storage', + 'django.core.files.storage.FileSystemStorage' +) + +# Default backup configuration +DBBACKUP_STORAGE_OPTIONS = get_setting('INVENTREE_BACKUP_OPTIONS', 'backup_options', None) +if DBBACKUP_STORAGE_OPTIONS is None: + DBBACKUP_STORAGE_OPTIONS = { + 'location': config.get_backup_dir(), + } + +# Application definition + +INSTALLED_APPS = [ + # Admin site integration + 'django.contrib.admin', + + # InvenTree apps + 'build.apps.BuildConfig', + 'common.apps.CommonConfig', + 'company.apps.CompanyConfig', + 'label.apps.LabelConfig', + 'order.apps.OrderConfig', + 'part.apps.PartConfig', + 'report.apps.ReportConfig', + 'stock.apps.StockConfig', + 'users.apps.UsersConfig', + 'plugin.apps.PluginAppConfig', + 'InvenTree.apps.InvenTreeConfig', # InvenTree app runs last + + # Core django modules + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'user_sessions', # db user sessions + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'django.contrib.sites', + + # Maintenance + 'maintenance_mode', + + # Third part add-ons + 'django_filters', # Extended filter functionality + 'rest_framework', # DRF (Django Rest Framework) + 'rest_framework.authtoken', # Token authentication for API + 'corsheaders', # Cross-origin Resource Sharing for DRF + 
'crispy_forms', # Improved form rendering + 'import_export', # Import / export tables to file + 'django_cleanup.apps.CleanupConfig', # Automatically delete orphaned MEDIA files + 'mptt', # Modified Preorder Tree Traversal + 'markdownify', # Markdown template rendering + 'djmoney', # django-money integration + 'djmoney.contrib.exchange', # django-money exchange rates + 'error_report', # Error reporting in the admin interface + 'django_q', + 'formtools', # Form wizard tools + 'dbbackup', # Backups - django-dbbackup + + 'allauth', # Base app for SSO + 'allauth.account', # Extend user with accounts + 'allauth.socialaccount', # Use 'social' providers + + 'django_otp', # OTP is needed for MFA - base package + 'django_otp.plugins.otp_totp', # Time based OTP + 'django_otp.plugins.otp_static', # Backup codes + + 'allauth_2fa', # MFA flow for allauth + + 'django_ical', # For exporting calendars +] + +MIDDLEWARE = CONFIG.get('middleware', [ + 'django.middleware.security.SecurityMiddleware', + 'x_forwarded_for.middleware.XForwardedForMiddleware', + 'user_sessions.middleware.SessionMiddleware', # db user sessions + 'django.middleware.locale.LocaleMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'corsheaders.middleware.CorsMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'InvenTree.middleware.InvenTreeRemoteUserMiddleware', # Remote / proxy auth + 'django_otp.middleware.OTPMiddleware', # MFA support + 'InvenTree.middleware.CustomAllauthTwoFactorMiddleware', # Flow control for allauth + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'InvenTree.middleware.AuthRequiredMiddleware', + 'InvenTree.middleware.Check2FAMiddleware', # Check if the user should be forced to use MFA + 'maintenance_mode.middleware.MaintenanceModeMiddleware', + 'InvenTree.middleware.InvenTreeExceptionProcessor', # Error reporting +]) + 
+AUTHENTICATION_BACKENDS = CONFIG.get('authentication_backends', [ + 'django.contrib.auth.backends.RemoteUserBackend', # proxy login + 'django.contrib.auth.backends.ModelBackend', + 'allauth.account.auth_backends.AuthenticationBackend', # SSO login via external providers +]) + +DEBUG_TOOLBAR_ENABLED = DEBUG and get_setting('INVENTREE_DEBUG_TOOLBAR', 'debug_toolbar', False) + +# If the debug toolbar is enabled, add the modules +if DEBUG_TOOLBAR_ENABLED: # pragma: no cover + logger.info("Running with DEBUG_TOOLBAR enabled") + INSTALLED_APPS.append('debug_toolbar') + MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware') + + DEBUG_TOOLBAR_CONFIG = { + 'RESULTS_CACHE_SIZE': 100, + 'OBSERVE_REQUEST_CALLBACK': lambda x: False, + } + +# Internal IP addresses allowed to see the debug toolbar +INTERNAL_IPS = [ + '127.0.0.1', +] + +# Internal flag to determine if we are running in docker mode +DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False) + +if DOCKER: # pragma: no cover + # Internal IP addresses are different when running under docker + hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname()) + INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"] + +# Allow secure http developer server in debug mode +if DEBUG: + INSTALLED_APPS.append('sslserver') + +# InvenTree URL configuration + +# Base URL for admin pages (default="admin") +INVENTREE_ADMIN_URL = get_setting( + 'INVENTREE_ADMIN_URL', + config_key='admin_url', + default_value='admin' +) + +ROOT_URLCONF = 'InvenTree.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [ + BASE_DIR.joinpath('templates'), + # Allow templates in the reporting directory to be accessed + MEDIA_ROOT.joinpath('report'), + MEDIA_ROOT.joinpath('label'), + ], + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 
'django.template.context_processors.i18n', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + # Custom InvenTree context processors + 'InvenTree.context.health_status', + 'InvenTree.context.status_codes', + 'InvenTree.context.user_roles', + ], + 'loaders': [( + 'django.template.loaders.cached.Loader', [ + 'plugin.template.PluginTemplateLoader', + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ]) + ], + }, + }, +] + +if DEBUG_TOOLBAR_ENABLED: # pragma: no cover + # Note that the APP_DIRS value must be set when using debug_toolbar + # But this will kill template loading for plugins + TEMPLATES[0]['APP_DIRS'] = True + del TEMPLATES[0]['OPTIONS']['loaders'] + +REST_FRAMEWORK = { + 'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler', + 'DATETIME_FORMAT': '%Y-%m-%d %H:%M', + 'DEFAULT_AUTHENTICATION_CLASSES': ( + 'rest_framework.authentication.BasicAuthentication', + 'rest_framework.authentication.SessionAuthentication', + 'rest_framework.authentication.TokenAuthentication', + ), + 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', + 'DEFAULT_PERMISSION_CLASSES': ( + 'rest_framework.permissions.IsAuthenticated', + 'rest_framework.permissions.DjangoModelPermissions', + 'InvenTree.permissions.RolePermission', + ), + 'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema', + 'DEFAULT_METADATA_CLASS': 'InvenTree.metadata.InvenTreeMetadata', + 'DEFAULT_RENDERER_CLASSES': [ + 'rest_framework.renderers.JSONRenderer', + ] +} + +if DEBUG: + # Enable browsable API if in DEBUG mode + REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'].append('rest_framework.renderers.BrowsableAPIRenderer') + +WSGI_APPLICATION = 'InvenTree.wsgi.application' + +""" +Configure the database backend based on the user-specified values. 
+ +- Primarily this configuration happens in the config.yaml file +- However there may be reason to configure the DB via environmental variables +- The following code lets the user "mix and match" database configuration +""" + +logger.debug("Configuring database backend:") + +# Extract database configuration from the config.yaml file +db_config = CONFIG.get('database', {}) + +if not db_config: + db_config = {} + +# Environment variables take preference over config file! + +db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'] + +for key in db_keys: + # First, check the environment variables + env_key = f"INVENTREE_DB_{key}" + env_var = os.environ.get(env_key, None) + + if env_var: + # Make use PORT is int + if key == 'PORT': + try: + env_var = int(env_var) + except ValueError: + logger.error(f"Invalid number for {env_key}: {env_var}") + # Override configuration value + db_config[key] = env_var + +# Check that required database configuration options are specified +required_keys = ['ENGINE', 'NAME'] + +for key in required_keys: + if key not in db_config: # pragma: no cover + error_msg = f'Missing required database configuration value {key}' + logger.error(error_msg) + + print('Error: ' + error_msg) + sys.exit(-1) + +""" +Special considerations for the database 'ENGINE' setting. 
+It can be specified in config.yaml (or envvar) as either (for example): +- sqlite3 +- django.db.backends.sqlite3 +- django.db.backends.postgresql +""" + +db_engine = db_config['ENGINE'].lower() + +# Correct common misspelling +if db_engine == 'sqlite': + db_engine = 'sqlite3' # pragma: no cover + +if db_engine in ['sqlite3', 'postgresql', 'mysql']: + # Prepend the required python module string + db_engine = f'django.db.backends.{db_engine}' + db_config['ENGINE'] = db_engine + +db_name = db_config['NAME'] +db_host = db_config.get('HOST', "''") + +if 'sqlite' in db_engine: + db_name = str(Path(db_name).resolve()) + db_config['NAME'] = db_name + +logger.info(f"DB_ENGINE: {db_engine}") +logger.info(f"DB_NAME: {db_name}") +logger.info(f"DB_HOST: {db_host}") + +""" +In addition to base-level database configuration, we may wish to specify specific options to the database backend +Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS +""" + +# 'OPTIONS' or 'options' can be specified in config.yaml +# Set useful sensible timeouts for a transactional webserver to communicate +# with its database server, that is, if the webserver is having issues +# connecting to the database server (such as a replica failover) don't sit and +# wait for possibly an hour or more, just tell the client something went wrong +# and let the client retry when they want to. 
+db_options = db_config.get("OPTIONS", db_config.get("options", {})) + +# Specific options for postgres backend +if "postgres" in db_engine: # pragma: no cover + from psycopg2.extensions import (ISOLATION_LEVEL_READ_COMMITTED, + ISOLATION_LEVEL_SERIALIZABLE) + + # Connection timeout + if "connect_timeout" not in db_options: + # The DB server is in the same data center, it should not take very + # long to connect to the database server + # # seconds, 2 is minium allowed by libpq + db_options["connect_timeout"] = int( + get_setting('INVENTREE_DB_TIMEOUT', 'database.timeout', 2) + ) + + # Setup TCP keepalive + # DB server is in the same DC, it should not become unresponsive for + # very long. With the defaults below we wait 5 seconds for the network + # issue to resolve itself. It it that doesn't happen whatever happened + # is probably fatal and no amount of waiting is going to fix it. + # # 0 - TCP Keepalives disabled; 1 - enabled + if "keepalives" not in db_options: + db_options["keepalives"] = int( + get_setting('INVENTREE_DB_TCP_KEEPALIVES', 'database.tcp_keepalives', 1) + ) + + # Seconds after connection is idle to send keep alive + if "keepalives_idle" not in db_options: + db_options["keepalives_idle"] = int( + get_setting('INVENTREE_DB_TCP_KEEPALIVES_IDLE', 'database.tcp_keepalives_idle', 1) + ) + + # Seconds after missing ACK to send another keep alive + if "keepalives_interval" not in db_options: + db_options["keepalives_interval"] = int( + get_setting("INVENTREE_DB_TCP_KEEPALIVES_INTERVAL", "database.tcp_keepalives_internal", "1") + ) + + # Number of missing ACKs before we close the connection + if "keepalives_count" not in db_options: + db_options["keepalives_count"] = int( + get_setting("INVENTREE_DB_TCP_KEEPALIVES_COUNT", "database.tcp_keepalives_count", "5") + ) + + # # Milliseconds for how long pending data should remain unacked + # by the remote server + # TODO: Supported starting in PSQL 11 + # "tcp_user_timeout": int(os.getenv("PGTCP_USER_TIMEOUT", 
"1000"), + + # Postgres's default isolation level is Read Committed which is + # normally fine, but most developers think the database server is + # actually going to do Serializable type checks on the queries to + # protect against simultaneous changes. + # https://www.postgresql.org/docs/devel/transaction-iso.html + # https://docs.djangoproject.com/en/3.2/ref/databases/#isolation-level + if "isolation_level" not in db_options: + serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) + db_options["isolation_level"] = ISOLATION_LEVEL_SERIALIZABLE if serializable else ISOLATION_LEVEL_READ_COMMITTED + +# Specific options for MySql / MariaDB backend +elif "mysql" in db_engine: # pragma: no cover + # TODO TCP time outs and keepalives + + # MariaDB's default isolation level is Repeatable Read which is + # normally fine, but most developers think the database server is + # actually going to Serializable type checks on the queries to + # protect against siumltaneous changes. + # https://mariadb.com/kb/en/mariadb-transactions-and-isolation-levels-for-sql-server-users/#changing-the-isolation-level + # https://docs.djangoproject.com/en/3.2/ref/databases/#mysql-isolation-level + if "isolation_level" not in db_options: + serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) + db_options["isolation_level"] = "serializable" if serializable else "read committed" + +# Specific options for sqlite backend +elif "sqlite" in db_engine: + # TODO: Verify timeouts are not an issue because no network is involved for SQLite + + # SQLite's default isolation level is Serializable due to SQLite's + # single writer implementation. Presumably as a result of this, it is + # not possible to implement any lower isolation levels in SQLite. 
+ # https://www.sqlite.org/isolation.html + pass + +# Provide OPTIONS dict back to the database configuration dict +db_config['OPTIONS'] = db_options + +# Set testing options for the database +db_config['TEST'] = { + 'CHARSET': 'utf8', +} + +# Set collation option for mysql test database +if 'mysql' in db_engine: + db_config['TEST']['COLLATION'] = 'utf8_general_ci' # pragma: no cover + +DATABASES = { + 'default': db_config +} + +# login settings +REMOTE_LOGIN = get_boolean_setting('INVENTREE_REMOTE_LOGIN', 'remote_login_enabled', False) +REMOTE_LOGIN_HEADER = get_setting('INVENTREE_REMOTE_LOGIN_HEADER', 'remote_login_header', 'REMOTE_USER') + +# sentry.io integration for error reporting +SENTRY_ENABLED = get_boolean_setting('INVENTREE_SENTRY_ENABLED', 'sentry_enabled', False) +# Default Sentry DSN (can be overriden if user wants custom sentry integration) +INVENTREE_DSN = 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600' +SENTRY_DSN = get_setting('INVENTREE_SENTRY_DSN', 'sentry_dsn', INVENTREE_DSN) +SENTRY_SAMPLE_RATE = float(get_setting('INVENTREE_SENTRY_SAMPLE_RATE', 'sentry_sample_rate', 0.1)) + +if SENTRY_ENABLED and SENTRY_DSN: # pragma: no cover + sentry_sdk.init( + dsn=SENTRY_DSN, + integrations=[DjangoIntegration(), ], + traces_sample_rate=1.0 if DEBUG else SENTRY_SAMPLE_RATE, + send_default_pii=True + ) + inventree_tags = { + 'testing': TESTING, + 'docker': DOCKER, + 'debug': DEBUG, + 'remote': REMOTE_LOGIN, + } + for key, val in inventree_tags.items(): + sentry_sdk.set_tag(f'inventree_{key}', val) + +# Cache configuration +cache_host = get_setting('INVENTREE_CACHE_HOST', 'cache.host', None) +cache_port = get_setting('INVENTREE_CACHE_PORT', 'cache.port', '6379', typecast=int) + +if cache_host: # pragma: no cover + # We are going to rely upon a possibly non-localhost for our cache, + # so don't wait too long for the cache as nothing in the cache should be + # irreplacable. 
+ _cache_options = { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")), + "SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")), + "CONNECTION_POOL_KWARGS": { + "socket_keepalive": config.is_true( + os.getenv("CACHE_TCP_KEEPALIVE", "1") + ), + "socket_keepalive_options": { + socket.TCP_KEEPCNT: int( + os.getenv("CACHE_KEEPALIVES_COUNT", "5") + ), + socket.TCP_KEEPIDLE: int( + os.getenv("CACHE_KEEPALIVES_IDLE", "1") + ), + socket.TCP_KEEPINTVL: int( + os.getenv("CACHE_KEEPALIVES_INTERVAL", "1") + ), + socket.TCP_USER_TIMEOUT: int( + os.getenv("CACHE_TCP_USER_TIMEOUT", "1000") + ), + }, + }, + } + CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": f"redis://{cache_host}:{cache_port}/0", + "OPTIONS": _cache_options, + }, + } +else: + CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + }, + } + +_q_worker_timeout = int(get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)) + +# django-q background worker configuration +Q_CLUSTER = { + 'name': 'InvenTree', + 'label': 'Background Tasks', + 'workers': int(get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4)), + 'timeout': _q_worker_timeout, + 'retry': min(120, _q_worker_timeout + 30), + 'max_attempts': int(get_setting('INVENTREE_BACKGROUND_MAX_ATTEMPTS', 'background.max_attempts', 5)), + 'queue_limit': 50, + 'catch_up': False, + 'bulk': 10, + 'orm': 'default', + 'cache': 'default', + 'sync': False, +} + +# Configure django-q sentry integration +if SENTRY_ENABLED and SENTRY_DSN: + Q_CLUSTER['error_reporter'] = { + 'sentry': { + 'dsn': SENTRY_DSN + } + } + +if cache_host: # pragma: no cover + # If using external redis cache, make the cache the broker for Django Q + # as well + Q_CLUSTER["django_redis"] = "worker" + +# database user sessions +SESSION_ENGINE = 'user_sessions.backends.db' +LOGOUT_REDIRECT_URL = 
get_setting('INVENTREE_LOGOUT_REDIRECT_URL', 'logout_redirect_url', 'index') +SILENCED_SYSTEM_CHECKS = [ + 'admin.E410', +] + +# Password validation +# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + +# Extra (optional) URL validators +# See https://docs.djangoproject.com/en/2.2/ref/validators/#django.core.validators.URLValidator + +EXTRA_URL_SCHEMES = get_setting('INVENTREE_EXTRA_URL_SCHEMES', 'extra_url_schemes', []) + +if type(EXTRA_URL_SCHEMES) not in [list]: # pragma: no cover + logger.warning("extra_url_schemes not correctly formatted") + EXTRA_URL_SCHEMES = [] + +# Internationalization +# https://docs.djangoproject.com/en/dev/topics/i18n/ +LANGUAGE_CODE = get_setting('INVENTREE_LANGUAGE', 'language', 'en-us') +# Store language settings for 30 days +LANGUAGE_COOKIE_AGE = 2592000 + +# If a new language translation is supported, it must be added here +LANGUAGES = [ + ('cs', _('Czech')), + ('da', _('Danish')), + ('de', _('German')), + ('el', _('Greek')), + ('en', _('English')), + ('es', _('Spanish')), + ('es-mx', _('Spanish (Mexican)')), + ('fa', _('Farsi / Persian')), + ('fr', _('French')), + ('he', _('Hebrew')), + ('hu', _('Hungarian')), + ('it', _('Italian')), + ('ja', _('Japanese')), + ('ko', _('Korean')), + ('nl', _('Dutch')), + ('no', _('Norwegian')), + ('pl', _('Polish')), + ('pt', _('Portuguese')), + ('pt-BR', _('Portuguese (Brazilian)')), + ('ru', _('Russian')), + ('sl', _('Slovenian')), + ('sv', _('Swedish')), + ('th', _('Thai')), + ('tr', _('Turkish')), + ('vi', _('Vietnamese')), + ('zh-hans', _('Chinese')), +] + +# Testing interface 
translations +if get_boolean_setting('TEST_TRANSLATIONS', default_value=False): # pragma: no cover + # Set default language + LANGUAGE_CODE = 'xx' + + # Add to language catalog + LANGUAGES.append(('xx', 'Test')) + + # Add custom languages not provided by Django + EXTRA_LANG_INFO = { + 'xx': { + 'code': 'xx', + 'name': 'Test', + 'name_local': 'Test' + }, + } + LANG_INFO = dict(django.conf.locale.LANG_INFO, **EXTRA_LANG_INFO) + django.conf.locale.LANG_INFO = LANG_INFO + +# Currencies available for use +CURRENCIES = get_setting( + 'INVENTREE_CURRENCIES', 'currencies', + ['AUD', 'CAD', 'CNY', 'EUR', 'GBP', 'JPY', 'NZD', 'USD'], + typecast=list, +) + +# Maximum number of decimal places for currency rendering +CURRENCY_DECIMAL_PLACES = 6 + +# Check that each provided currency is supported +for currency in CURRENCIES: + if currency not in moneyed.CURRENCIES: # pragma: no cover + logger.error(f"Currency code '{currency}' is not supported") + sys.exit(1) + +# Custom currency exchange backend +EXCHANGE_BACKEND = 'InvenTree.exchange.InvenTreeExchange' + +# Email configuration options +EMAIL_BACKEND = get_setting('INVENTREE_EMAIL_BACKEND', 'email.backend', 'django.core.mail.backends.smtp.EmailBackend') +EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host', '') +EMAIL_PORT = get_setting('INVENTREE_EMAIL_PORT', 'email.port', 25, typecast=int) +EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username', '') +EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password', '') +EMAIL_SUBJECT_PREFIX = get_setting('INVENTREE_EMAIL_PREFIX', 'email.prefix', '[InvenTree] ') +EMAIL_USE_TLS = get_boolean_setting('INVENTREE_EMAIL_TLS', 'email.tls', False) +EMAIL_USE_SSL = get_boolean_setting('INVENTREE_EMAIL_SSL', 'email.ssl', False) + +DEFAULT_FROM_EMAIL = get_setting('INVENTREE_EMAIL_SENDER', 'email.sender', '') + +EMAIL_USE_LOCALTIME = False +EMAIL_TIMEOUT = 60 + +LOCALE_PATHS = ( + BASE_DIR.joinpath('locale/'), +) + +TIME_ZONE = 
get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC') + +USE_I18N = True + +USE_L10N = True + +# Do not use native timezone support in "test" mode +# It generates a *lot* of cruft in the logs +if not TESTING: + USE_TZ = True # pragma: no cover + +DATE_INPUT_FORMATS = [ + "%Y-%m-%d", +] + +# crispy forms use the bootstrap templates +CRISPY_TEMPLATE_PACK = 'bootstrap4' + +# Use database transactions when importing / exporting data +IMPORT_EXPORT_USE_TRANSACTIONS = True + +SITE_ID = 1 + +# Load the allauth social backends +SOCIAL_BACKENDS = get_setting('INVENTREE_SOCIAL_BACKENDS', 'social_backends', [], typecast=list) + +for app in SOCIAL_BACKENDS: + INSTALLED_APPS.append(app) # pragma: no cover + +SOCIALACCOUNT_PROVIDERS = get_setting('INVENTREE_SOCIAL_PROVIDERS', 'social_providers', None, typecast=dict) + +SOCIALACCOUNT_STORE_TOKENS = True + +# settings for allauth +ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = get_setting('INVENTREE_LOGIN_CONFIRM_DAYS', 'login_confirm_days', 3, typecast=int) +ACCOUNT_LOGIN_ATTEMPTS_LIMIT = get_setting('INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5, typecast=int) +ACCOUNT_DEFAULT_HTTP_PROTOCOL = get_setting('INVENTREE_LOGIN_DEFAULT_HTTP_PROTOCOL', 'login_default_protocol', 'http') +ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True +ACCOUNT_PREVENT_ENUMERATION = True + +# override forms / adapters +ACCOUNT_FORMS = { + 'login': 'allauth.account.forms.LoginForm', + 'signup': 'InvenTree.forms.CustomSignupForm', + 'add_email': 'allauth.account.forms.AddEmailForm', + 'change_password': 'allauth.account.forms.ChangePasswordForm', + 'set_password': 'allauth.account.forms.SetPasswordForm', + 'reset_password': 'allauth.account.forms.ResetPasswordForm', + 'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm', + 'disconnect': 'allauth.socialaccount.forms.DisconnectForm', +} + +SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter' +ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter' + +# Markdownify configuration +# Ref: 
https://django-markdownify.readthedocs.io/en/latest/settings.html + +MARKDOWNIFY = { + 'default': { + 'BLEACH': True, + 'WHITELIST_ATTRS': [ + 'href', + 'src', + 'alt', + ], + 'MARKDOWN_EXTENSIONS': [ + 'markdown.extensions.extra' + ], + 'WHITELIST_TAGS': [ + 'a', + 'abbr', + 'b', + 'blockquote', + 'em', + 'h1', 'h2', 'h3', + 'i', + 'img', + 'li', + 'ol', + 'p', + 'strong', + 'ul', + 'table', + 'thead', + 'tbody', + 'th', + 'tr', + 'td' + ], + } +} + +# Ignore these error typeps for in-database error logging +IGNORED_ERRORS = [ + Http404, + django.core.exceptions.PermissionDenied, +] + +# Maintenance mode +MAINTENANCE_MODE_RETRY_AFTER = 60 +MAINTENANCE_MODE_STATE_BACKEND = 'maintenance_mode.backends.StaticStorageBackend' + +# Are plugins enabled? +PLUGINS_ENABLED = get_boolean_setting('INVENTREE_PLUGINS_ENABLED', 'plugins_enabled', False) + +PLUGIN_FILE = config.get_plugin_file() + +# Plugin test settings +PLUGIN_TESTING = get_setting('INVENTREE_PLUGIN_TESTING', 'PLUGIN_TESTING', TESTING) # Are plugins beeing tested? +PLUGIN_TESTING_SETUP = get_setting('INVENTREE_PLUGIN_TESTING_SETUP', 'PLUGIN_TESTING_SETUP', False) # Load plugins from setup hooks in testing? +PLUGIN_TESTING_EVENTS = False # Flag if events are tested right now +PLUGIN_RETRY = get_setting('INVENTREE_PLUGIN_RETRY', 'PLUGIN_RETRY', 5) # How often should plugin loading be tried? +PLUGIN_FILE_CHECKED = False # Was the plugin file checked? 
+ +# User interface customization values +CUSTOM_LOGO = get_custom_file('INVENTREE_CUSTOM_LOGO', 'customize.logo', 'custom logo', lookup_media=True) +CUSTOM_SPLASH = get_custom_file('INVENTREE_CUSTOM_SPLASH', 'customize.splash', 'custom splash') + +CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {}) +if DEBUG: + logger.info("InvenTree running with DEBUG enabled") + +logger.info(f"MEDIA_ROOT: '{MEDIA_ROOT}'") +logger.info(f"STATIC_ROOT: '{STATIC_ROOT}'") diff --git a/sample.env b/sample.env new file mode 100644 index 0000000..9fff491 --- /dev/null +++ b/sample.env @@ -0,0 +1,68 @@ +# InvenTree environment variables for a postgresql production setup +COMPOSE_PROJECT_NAME=inventree + +# Location of persistent database data (stored external to the docker containers) +# Note: You *must* un-comment this line, and point it to a path on your local machine + +# e.g. Linux +INVENTREE_EXT_VOLUME=data + +# e.g. Windows (docker desktop) +#INVENTREE_EXT_VOLUME=c:/Users/me/inventree-data + +# Default web port for the InvenTree server +INVENTREE_WEB_PORT=8080 + +# Ensure debug is false for a production setup +INVENTREE_DEBUG=False +INVENTREE_LOG_LEVEL=WARNING + +# Database configuration options +# Note: The example setup is for a PostgreSQL database +INVENTREE_DB_ENGINE=postgresql +INVENTREE_DB_NAME=inventree +INVENTREE_DB_HOST=inventree-db +INVENTREE_DB_PORT=5432 + +# Redis cache setup (disabled by default) +# Un-comment the following lines to enable Redis cache +# Note that you will also have to run docker-compose with the --profile redis command +# Refer to settings.py for other cache options +#INVENTREE_CACHE_HOST=inventree-cache +#INVENTREE_CACHE_PORT=6379 + +# Options for gunicorn server +INVENTREE_GUNICORN_TIMEOUT=30 + +# Enable custom plugins? 
+INVENTREE_PLUGINS_ENABLED=False + +# Image tag that should be used +INVENTREE_IMAGE=inventree/inventree:0.10.1 +REDIS_IMAGE=redis:7.0-alpine +NGINX_IMAGE=nginxinc/nginx-unprivileged:stable-alpine +# Postgres image must match version of pgdump in inventree image +POSTGRES_IMAGE=postgres:13-alpine + +# InvenTree admin account details +# make sure to use secure credentials these lines to auto-create an admin acount +INVENTREE_ADMIN_USER=admin +INVENTREE_ADMIN_PASSWORD=password +INVENTREE_ADMIN_EMAIL=admin@inventree.example + +# Database credentials - These must be configured before running +# Change from the default values! +INVENTREE_DB_USER=inventree +INVENTREE_DB_PASSWORD=password + +# Django configuration +INVENTREE_SECRET_KEY=some-secret-key +ALLOWED_HOSTS=inventree.example.com,www.inventree.example.com + +# SSO Config +INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.keycloak + +HKNG_KEYCLOAK_URL=https://keycloak.example.com +HKNG_KEYCLOAK_REALM=master +HKNG_KEYCLOAK_CLIENT_ID=example-client +HKNG_KEYCLOAK_CLIENT_SECRET=example-secret From 8aed879a5fa59e761922e613145e9597560587fe Mon Sep 17 00:00:00 2001 From: philipp Date: Sun, 19 Mar 2023 18:53:39 +0100 Subject: [PATCH 02/24] remove unused variables --- Makefile | 3 --- docker-compose.yml | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 4abc95c..a152205 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,3 @@ -CONFIG_FILE = conf/config.env -SECRET_FILE = conf/secrets.env - COMPOSE_CMD ?= docker compose # use unix:///run/docker.sock for docker socket, unix://${XDG_RUNTIME_DIR}/podman/podman.sock for podman DOCKER_HOST ?= unix:///run/docker.sock diff --git a/docker-compose.yml b/docker-compose.yml index c7957b7..13d476b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -100,9 +100,10 @@ services: volumes: # Data volume must map to /home/inventree/data - inventree_data:/home/inventree/data:z + - ./plugins:/home/inventree/InvenTree/plugins:z # ugly 
backport of 0.11.0 features, to be removed - ./patch/settings.py:/home/inventree/InvenTree/InvenTree/settings.py:ro,Z - - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:zo,Z + - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:ro,Z restart: unless-stopped # Background worker process handles long-running or periodic tasks From e8f9ff68dc9c8227b3ee754fa304d6e880d73aee Mon Sep 17 00:00:00 2001 From: philipp Date: Sun, 18 Jun 2023 19:06:19 +0200 Subject: [PATCH 03/24] cleanup: Remove fixes for pre 0.11.X releases --- docker-compose.yml | 3 - patch/config.py | 347 ----------------- patch/settings.py | 914 --------------------------------------------- sample.env | 2 +- 4 files changed, 1 insertion(+), 1265 deletions(-) delete mode 100644 patch/config.py delete mode 100644 patch/settings.py diff --git a/docker-compose.yml b/docker-compose.yml index 13d476b..6d6dd69 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -101,9 +101,6 @@ services: # Data volume must map to /home/inventree/data - inventree_data:/home/inventree/data:z - ./plugins:/home/inventree/InvenTree/plugins:z - # ugly backport of 0.11.0 features, to be removed - - ./patch/settings.py:/home/inventree/InvenTree/InvenTree/settings.py:ro,Z - - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:ro,Z restart: unless-stopped # Background worker process handles long-running or periodic tasks diff --git a/patch/config.py b/patch/config.py deleted file mode 100644 index 720e7bd..0000000 --- a/patch/config.py +++ /dev/null @@ -1,347 +0,0 @@ -"""Helper functions for loading InvenTree configuration options.""" - -import datetime -import json -import logging -import os -import random -import shutil -import string -from pathlib import Path - -logger = logging.getLogger('inventree') -CONFIG_DATA = None -CONFIG_LOOKUPS = {} - - -def to_list(value, delimiter=','): - """Take a configuration setting and make sure it is a list. 
- - For example, we might have a configuration setting taken from the .config file, - which is already a list. - - However, the same setting may be specified via an environment variable, - using a comma delimited string! - """ - - if type(value) in [list, tuple]: - return value - - # Otherwise, force string value - value = str(value) - - return [x.strip() for x in value.split(delimiter)] - - -def to_dict(value): - """Take a configuration setting and make sure it is a dict. - - For example, we might have a configuration setting taken from the .config file, - which is already an object/dict. - - However, the same setting may be specified via an environment variable, - using a valid JSON string! - """ - if value is None: - return {} - - if type(value) == dict: - return value - - try: - return json.loads(value) - except Exception as error: - logger.error(f"Failed to parse value '{value}' as JSON with error {error}. Ensure value is a valid JSON string.") - return {} - - -def is_true(x): - """Shortcut function to determine if a value "looks" like a boolean""" - return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on'] - - -def get_base_dir() -> Path: - """Returns the base (top-level) InvenTree directory.""" - return Path(__file__).parent.parent.resolve() - - -def ensure_dir(path: Path) -> None: - """Ensure that a directory exists. - - If it does not exist, create it. - """ - - if not path.exists(): - path.mkdir(parents=True, exist_ok=True) - - -def get_config_file(create=True) -> Path: - """Returns the path of the InvenTree configuration file. - - Note: It will be created it if does not already exist! 
- """ - base_dir = get_base_dir() - - cfg_filename = os.getenv('INVENTREE_CONFIG_FILE') - - if cfg_filename: - cfg_filename = Path(cfg_filename.strip()).resolve() - else: - # Config file is *not* specified - use the default - cfg_filename = base_dir.joinpath('config.yaml').resolve() - - if not cfg_filename.exists() and create: - print("InvenTree configuration file 'config.yaml' not found - creating default file") - ensure_dir(cfg_filename.parent) - - cfg_template = base_dir.joinpath("config_template.yaml") - shutil.copyfile(cfg_template, cfg_filename) - print(f"Created config file {cfg_filename}") - - return cfg_filename - - -def load_config_data(set_cache: bool = False) -> map: - """Load configuration data from the config file. - - Arguments: - set_cache(bool): If True, the configuration data will be cached for future use after load. - """ - global CONFIG_DATA - - # use cache if populated - # skip cache if cache should be set - if CONFIG_DATA is not None and not set_cache: - return CONFIG_DATA - - import yaml - - cfg_file = get_config_file() - - with open(cfg_file, 'r') as cfg: - data = yaml.safe_load(cfg) - - # Set the cache if requested - if set_cache: - CONFIG_DATA = data - - return data - - -def get_setting(env_var=None, config_key=None, default_value=None, typecast=None): - """Helper function for retrieving a configuration setting value. - - - First preference is to look for the environment variable - - Second preference is to look for the value of the settings file - - Third preference is the default value - - Arguments: - env_var: Name of the environment variable e.g. 
'INVENTREE_STATIC_ROOT' - config_key: Key to lookup in the configuration file - default_value: Value to return if first two options are not provided - typecast: Function to use for typecasting the value - """ - def try_typecasting(value, source: str): - """Attempt to typecast the value""" - - # Force 'list' of strings - if typecast is list: - value = to_list(value) - - # Valid JSON string is required - elif typecast is dict: - value = to_dict(value) - - elif typecast is not None: - # Try to typecast the value - try: - val = typecast(value) - set_metadata(source) - return val - except Exception as error: - logger.error(f"Failed to typecast '{env_var}' with value '{value}' to type '{typecast}' with error {error}") - - set_metadata(source) - return value - - def set_metadata(source: str): - """Set lookup metadata for the setting.""" - key = env_var or config_key - CONFIG_LOOKUPS[key] = {'env_var': env_var, 'config_key': config_key, 'source': source, 'accessed': datetime.datetime.now()} - - # First, try to load from the environment variables - if env_var is not None: - val = os.getenv(env_var, None) - - if val is not None: - return try_typecasting(val, 'env') - - # Next, try to load from configuration file - if config_key is not None: - cfg_data = load_config_data() - - result = None - - # Hack to allow 'path traversal' in configuration file - for key in config_key.strip().split('.'): - - if type(cfg_data) is not dict or key not in cfg_data: - result = None - break - - result = cfg_data[key] - cfg_data = cfg_data[key] - - if result is not None: - return try_typecasting(result, 'yaml') - - # Finally, return the default value - return try_typecasting(default_value, 'default') - - -def get_boolean_setting(env_var=None, config_key=None, default_value=False): - """Helper function for retreiving a boolean configuration setting""" - - return is_true(get_setting(env_var, config_key, default_value)) - - -def get_media_dir(create=True): - """Return the absolute path for the 
'media' directory (where uploaded files are stored)""" - - md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root') - - if not md: - raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified') - - md = Path(md).resolve() - - if create: - md.mkdir(parents=True, exist_ok=True) - - return md - - -def get_static_dir(create=True): - """Return the absolute path for the 'static' directory (where static files are stored)""" - - sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root') - - if not sd: - raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified') - - sd = Path(sd).resolve() - - if create: - sd.mkdir(parents=True, exist_ok=True) - - return sd - - -def get_backup_dir(create=True): - """Return the absolute path for the backup directory""" - - bd = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir') - - if not bd: - raise FileNotFoundError('INVENTREE_BACKUP_DIR not specified') - - bd = Path(bd).resolve() - - if create: - bd.mkdir(parents=True, exist_ok=True) - - return bd - - -def get_plugin_file(): - """Returns the path of the InvenTree plugins specification file. - - Note: It will be created if it does not already exist! 
- """ - - # Check if the plugin.txt file (specifying required plugins) is specified - plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file') - - if not plugin_file: - # If not specified, look in the same directory as the configuration file - config_dir = get_config_file().parent - plugin_file = config_dir.joinpath('plugins.txt') - else: - # Make sure we are using a modern Path object - plugin_file = Path(plugin_file) - - if not plugin_file.exists(): - logger.warning("Plugin configuration file does not exist - creating default file") - logger.info(f"Creating plugin file at '{plugin_file}'") - ensure_dir(plugin_file.parent) - - # If opening the file fails (no write permission, for example), then this will throw an error - plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n") - - return plugin_file - - -def get_secret_key(): - """Return the secret key value which will be used by django. - - Following options are tested, in descending order of preference: - - A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data - B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file - C) Look for default key file "secret_key.txt" - D) Create "secret_key.txt" if it does not exist - """ - - # Look for environment variable - if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'): - logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover - return secret_key - - # Look for secret key file - if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'): - secret_key_file = Path(secret_key_file).resolve() - else: - # Default location for secret key file - secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve() - - if not secret_key_file.exists(): - logger.info(f"Generating random key file at '{secret_key_file}'") - ensure_dir(secret_key_file.parent) - - # Create a random key file - options = string.digits + string.ascii_letters + 
string.punctuation - key = ''.join([random.choice(options) for i in range(100)]) - secret_key_file.write_text(key) - - logger.info(f"Loading SECRET_KEY from '{secret_key_file}'") - - key_data = secret_key_file.read_text().strip() - - return key_data - - -def get_custom_file(env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False): - """Returns the checked path to a custom file. - - Set lookup_media to True to also search in the media folder. - """ - from django.contrib.staticfiles.storage import StaticFilesStorage - from django.core.files.storage import default_storage - - value = get_setting(env_ref, conf_ref, None) - - if not value: - return None - - static_storage = StaticFilesStorage() - - if static_storage.exists(value): - logger.info(f"Loading {log_ref} from static directory: {value}") - elif lookup_media and default_storage.exists(value): - logger.info(f"Loading {log_ref} from media directory: {value}") - else: - add_dir_str = ' or media' if lookup_media else '' - logger.warning(f"The {log_ref} file '{value}' could not be found in the static{add_dir_str} directories") - value = False - - return value diff --git a/patch/settings.py b/patch/settings.py deleted file mode 100644 index 1aab68d..0000000 --- a/patch/settings.py +++ /dev/null @@ -1,914 +0,0 @@ -"""Django settings for InvenTree project. - -In practice the settings in this file should not be adjusted, -instead settings can be configured in the config.yaml file -located in the top level project directory. - -This allows implementation configuration to be hidden from source control, -as well as separate configuration parameters from the more complex -database setup in this file. 
-""" - -import logging -import os -import socket -import sys -from pathlib import Path - -import django.conf.locale -import django.core.exceptions -from django.http import Http404 -from django.utils.translation import gettext_lazy as _ - -import moneyed -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration - -from . import config -from .config import get_boolean_setting, get_custom_file, get_setting - -INVENTREE_NEWS_URL = 'https://inventree.org/news/feed.atom' - -# Determine if we are running in "test" mode e.g. "manage.py test" -TESTING = 'test' in sys.argv - -if TESTING: - - # Use a weaker password hasher for testing (improves testing speed) - PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher',] - - # Enable slow-test-runner - TEST_RUNNER = 'django_slowtests.testrunner.DiscoverSlowestTestsRunner' - NUM_SLOW_TESTS = 25 - - # Note: The following fix is "required" for docker build workflow - # Note: 2022-12-12 still unsure why... - if os.getenv('INVENTREE_DOCKER'): - # Ensure that sys.path includes global python libs - site_packages = '/usr/local/lib/python3.9/site-packages' - - if site_packages not in sys.path: - print("Adding missing site-packages path:", site_packages) - sys.path.append(site_packages) - -# Are environment variables manipulated by tests? Needs to be set by testing code -TESTING_ENV = False - -# New requirement for django 3.2+ -DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' - -# Build paths inside the project like this: BASE_DIR.joinpath(...) -BASE_DIR = config.get_base_dir() - -# Load configuration data -CONFIG = config.load_config_data(set_cache=True) - -# Default action is to run the system in Debug mode -# SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = get_boolean_setting('INVENTREE_DEBUG', 'debug', True) - -# Configure logging settings -log_level = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING') - -logging.basicConfig( - level=log_level, - format="%(asctime)s %(levelname)s %(message)s", -) - -if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']: - log_level = 'WARNING' # pragma: no cover - -LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - }, - }, - 'root': { - 'handlers': ['console'], - 'level': log_level, - }, - 'filters': { - 'require_not_maintenance_mode_503': { - '()': 'maintenance_mode.logging.RequireNotMaintenanceMode503', - }, - }, -} - -# Get a logger instance for this setup file -logger = logging.getLogger("inventree") - -# Load SECRET_KEY -SECRET_KEY = config.get_secret_key() - -# The filesystem location for served static files -STATIC_ROOT = config.get_static_dir() - -# The filesystem location for uploaded meadia files -MEDIA_ROOT = config.get_media_dir() - -# List of allowed hosts (default = allow all) -ALLOWED_HOSTS = get_setting( - "INVENTREE_ALLOWED_HOSTS", - config_key='allowed_hosts', - default_value=['*'], - typecast=list, -) - -# Cross Origin Resource Sharing (CORS) options - -# Only allow CORS access to API -CORS_URLS_REGEX = r'^/api/.*$' - -# Extract CORS options from configuration file -CORS_ORIGIN_ALLOW_ALL = get_boolean_setting( - "INVENTREE_CORS_ORIGIN_ALLOW_ALL", - config_key='cors.allow_all', - default_value=False, -) - -CORS_ORIGIN_WHITELIST = get_setting( - "INVENTREE_CORS_ORIGIN_WHITELIST", - config_key='cors.whitelist', - default_value=[], - typecast=list, -) - -# Needed for the parts importer, directly impacts the maximum parts that can be uploaded -DATA_UPLOAD_MAX_NUMBER_FIELDS = 10000 - -# Web URL endpoint for served static files -STATIC_URL = '/static/' - -STATICFILES_DIRS = [] - -# Translated Template settings -STATICFILES_I18_PREFIX = 'i18n' 
-STATICFILES_I18_SRC = BASE_DIR.joinpath('templates', 'js', 'translated') -STATICFILES_I18_TRG = BASE_DIR.joinpath('InvenTree', 'static_i18n') -STATICFILES_DIRS.append(STATICFILES_I18_TRG) -STATICFILES_I18_TRG = STATICFILES_I18_TRG.joinpath(STATICFILES_I18_PREFIX) - -STATFILES_I18_PROCESSORS = [ - 'InvenTree.context.status_codes', -] - -# Color Themes Directory -STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes').resolve() - -# Web URL endpoint for served media files -MEDIA_URL = '/media/' - -# Database backup options -# Ref: https://django-dbbackup.readthedocs.io/en/master/configuration.html -DBBACKUP_SEND_EMAIL = False -DBBACKUP_STORAGE = get_setting( - 'INVENTREE_BACKUP_STORAGE', - 'backup_storage', - 'django.core.files.storage.FileSystemStorage' -) - -# Default backup configuration -DBBACKUP_STORAGE_OPTIONS = get_setting('INVENTREE_BACKUP_OPTIONS', 'backup_options', None) -if DBBACKUP_STORAGE_OPTIONS is None: - DBBACKUP_STORAGE_OPTIONS = { - 'location': config.get_backup_dir(), - } - -# Application definition - -INSTALLED_APPS = [ - # Admin site integration - 'django.contrib.admin', - - # InvenTree apps - 'build.apps.BuildConfig', - 'common.apps.CommonConfig', - 'company.apps.CompanyConfig', - 'label.apps.LabelConfig', - 'order.apps.OrderConfig', - 'part.apps.PartConfig', - 'report.apps.ReportConfig', - 'stock.apps.StockConfig', - 'users.apps.UsersConfig', - 'plugin.apps.PluginAppConfig', - 'InvenTree.apps.InvenTreeConfig', # InvenTree app runs last - - # Core django modules - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'user_sessions', # db user sessions - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django.contrib.sites', - - # Maintenance - 'maintenance_mode', - - # Third part add-ons - 'django_filters', # Extended filter functionality - 'rest_framework', # DRF (Django Rest Framework) - 'rest_framework.authtoken', # Token authentication for API - 'corsheaders', # Cross-origin Resource Sharing for DRF - 
'crispy_forms', # Improved form rendering - 'import_export', # Import / export tables to file - 'django_cleanup.apps.CleanupConfig', # Automatically delete orphaned MEDIA files - 'mptt', # Modified Preorder Tree Traversal - 'markdownify', # Markdown template rendering - 'djmoney', # django-money integration - 'djmoney.contrib.exchange', # django-money exchange rates - 'error_report', # Error reporting in the admin interface - 'django_q', - 'formtools', # Form wizard tools - 'dbbackup', # Backups - django-dbbackup - - 'allauth', # Base app for SSO - 'allauth.account', # Extend user with accounts - 'allauth.socialaccount', # Use 'social' providers - - 'django_otp', # OTP is needed for MFA - base package - 'django_otp.plugins.otp_totp', # Time based OTP - 'django_otp.plugins.otp_static', # Backup codes - - 'allauth_2fa', # MFA flow for allauth - - 'django_ical', # For exporting calendars -] - -MIDDLEWARE = CONFIG.get('middleware', [ - 'django.middleware.security.SecurityMiddleware', - 'x_forwarded_for.middleware.XForwardedForMiddleware', - 'user_sessions.middleware.SessionMiddleware', # db user sessions - 'django.middleware.locale.LocaleMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'corsheaders.middleware.CorsMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'InvenTree.middleware.InvenTreeRemoteUserMiddleware', # Remote / proxy auth - 'django_otp.middleware.OTPMiddleware', # MFA support - 'InvenTree.middleware.CustomAllauthTwoFactorMiddleware', # Flow control for allauth - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'InvenTree.middleware.AuthRequiredMiddleware', - 'InvenTree.middleware.Check2FAMiddleware', # Check if the user should be forced to use MFA - 'maintenance_mode.middleware.MaintenanceModeMiddleware', - 'InvenTree.middleware.InvenTreeExceptionProcessor', # Error reporting -]) - 
-AUTHENTICATION_BACKENDS = CONFIG.get('authentication_backends', [ - 'django.contrib.auth.backends.RemoteUserBackend', # proxy login - 'django.contrib.auth.backends.ModelBackend', - 'allauth.account.auth_backends.AuthenticationBackend', # SSO login via external providers -]) - -DEBUG_TOOLBAR_ENABLED = DEBUG and get_setting('INVENTREE_DEBUG_TOOLBAR', 'debug_toolbar', False) - -# If the debug toolbar is enabled, add the modules -if DEBUG_TOOLBAR_ENABLED: # pragma: no cover - logger.info("Running with DEBUG_TOOLBAR enabled") - INSTALLED_APPS.append('debug_toolbar') - MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware') - - DEBUG_TOOLBAR_CONFIG = { - 'RESULTS_CACHE_SIZE': 100, - 'OBSERVE_REQUEST_CALLBACK': lambda x: False, - } - -# Internal IP addresses allowed to see the debug toolbar -INTERNAL_IPS = [ - '127.0.0.1', -] - -# Internal flag to determine if we are running in docker mode -DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False) - -if DOCKER: # pragma: no cover - # Internal IP addresses are different when running under docker - hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname()) - INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"] - -# Allow secure http developer server in debug mode -if DEBUG: - INSTALLED_APPS.append('sslserver') - -# InvenTree URL configuration - -# Base URL for admin pages (default="admin") -INVENTREE_ADMIN_URL = get_setting( - 'INVENTREE_ADMIN_URL', - config_key='admin_url', - default_value='admin' -) - -ROOT_URLCONF = 'InvenTree.urls' - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [ - BASE_DIR.joinpath('templates'), - # Allow templates in the reporting directory to be accessed - MEDIA_ROOT.joinpath('report'), - MEDIA_ROOT.joinpath('label'), - ], - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 
'django.template.context_processors.i18n', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - # Custom InvenTree context processors - 'InvenTree.context.health_status', - 'InvenTree.context.status_codes', - 'InvenTree.context.user_roles', - ], - 'loaders': [( - 'django.template.loaders.cached.Loader', [ - 'plugin.template.PluginTemplateLoader', - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - ]) - ], - }, - }, -] - -if DEBUG_TOOLBAR_ENABLED: # pragma: no cover - # Note that the APP_DIRS value must be set when using debug_toolbar - # But this will kill template loading for plugins - TEMPLATES[0]['APP_DIRS'] = True - del TEMPLATES[0]['OPTIONS']['loaders'] - -REST_FRAMEWORK = { - 'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler', - 'DATETIME_FORMAT': '%Y-%m-%d %H:%M', - 'DEFAULT_AUTHENTICATION_CLASSES': ( - 'rest_framework.authentication.BasicAuthentication', - 'rest_framework.authentication.SessionAuthentication', - 'rest_framework.authentication.TokenAuthentication', - ), - 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', - 'DEFAULT_PERMISSION_CLASSES': ( - 'rest_framework.permissions.IsAuthenticated', - 'rest_framework.permissions.DjangoModelPermissions', - 'InvenTree.permissions.RolePermission', - ), - 'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema', - 'DEFAULT_METADATA_CLASS': 'InvenTree.metadata.InvenTreeMetadata', - 'DEFAULT_RENDERER_CLASSES': [ - 'rest_framework.renderers.JSONRenderer', - ] -} - -if DEBUG: - # Enable browsable API if in DEBUG mode - REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'].append('rest_framework.renderers.BrowsableAPIRenderer') - -WSGI_APPLICATION = 'InvenTree.wsgi.application' - -""" -Configure the database backend based on the user-specified values. 
- -- Primarily this configuration happens in the config.yaml file -- However there may be reason to configure the DB via environmental variables -- The following code lets the user "mix and match" database configuration -""" - -logger.debug("Configuring database backend:") - -# Extract database configuration from the config.yaml file -db_config = CONFIG.get('database', {}) - -if not db_config: - db_config = {} - -# Environment variables take preference over config file! - -db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'] - -for key in db_keys: - # First, check the environment variables - env_key = f"INVENTREE_DB_{key}" - env_var = os.environ.get(env_key, None) - - if env_var: - # Make use PORT is int - if key == 'PORT': - try: - env_var = int(env_var) - except ValueError: - logger.error(f"Invalid number for {env_key}: {env_var}") - # Override configuration value - db_config[key] = env_var - -# Check that required database configuration options are specified -required_keys = ['ENGINE', 'NAME'] - -for key in required_keys: - if key not in db_config: # pragma: no cover - error_msg = f'Missing required database configuration value {key}' - logger.error(error_msg) - - print('Error: ' + error_msg) - sys.exit(-1) - -""" -Special considerations for the database 'ENGINE' setting. 
-It can be specified in config.yaml (or envvar) as either (for example): -- sqlite3 -- django.db.backends.sqlite3 -- django.db.backends.postgresql -""" - -db_engine = db_config['ENGINE'].lower() - -# Correct common misspelling -if db_engine == 'sqlite': - db_engine = 'sqlite3' # pragma: no cover - -if db_engine in ['sqlite3', 'postgresql', 'mysql']: - # Prepend the required python module string - db_engine = f'django.db.backends.{db_engine}' - db_config['ENGINE'] = db_engine - -db_name = db_config['NAME'] -db_host = db_config.get('HOST', "''") - -if 'sqlite' in db_engine: - db_name = str(Path(db_name).resolve()) - db_config['NAME'] = db_name - -logger.info(f"DB_ENGINE: {db_engine}") -logger.info(f"DB_NAME: {db_name}") -logger.info(f"DB_HOST: {db_host}") - -""" -In addition to base-level database configuration, we may wish to specify specific options to the database backend -Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS -""" - -# 'OPTIONS' or 'options' can be specified in config.yaml -# Set useful sensible timeouts for a transactional webserver to communicate -# with its database server, that is, if the webserver is having issues -# connecting to the database server (such as a replica failover) don't sit and -# wait for possibly an hour or more, just tell the client something went wrong -# and let the client retry when they want to. 
-db_options = db_config.get("OPTIONS", db_config.get("options", {})) - -# Specific options for postgres backend -if "postgres" in db_engine: # pragma: no cover - from psycopg2.extensions import (ISOLATION_LEVEL_READ_COMMITTED, - ISOLATION_LEVEL_SERIALIZABLE) - - # Connection timeout - if "connect_timeout" not in db_options: - # The DB server is in the same data center, it should not take very - # long to connect to the database server - # # seconds, 2 is minium allowed by libpq - db_options["connect_timeout"] = int( - get_setting('INVENTREE_DB_TIMEOUT', 'database.timeout', 2) - ) - - # Setup TCP keepalive - # DB server is in the same DC, it should not become unresponsive for - # very long. With the defaults below we wait 5 seconds for the network - # issue to resolve itself. It it that doesn't happen whatever happened - # is probably fatal and no amount of waiting is going to fix it. - # # 0 - TCP Keepalives disabled; 1 - enabled - if "keepalives" not in db_options: - db_options["keepalives"] = int( - get_setting('INVENTREE_DB_TCP_KEEPALIVES', 'database.tcp_keepalives', 1) - ) - - # Seconds after connection is idle to send keep alive - if "keepalives_idle" not in db_options: - db_options["keepalives_idle"] = int( - get_setting('INVENTREE_DB_TCP_KEEPALIVES_IDLE', 'database.tcp_keepalives_idle', 1) - ) - - # Seconds after missing ACK to send another keep alive - if "keepalives_interval" not in db_options: - db_options["keepalives_interval"] = int( - get_setting("INVENTREE_DB_TCP_KEEPALIVES_INTERVAL", "database.tcp_keepalives_internal", "1") - ) - - # Number of missing ACKs before we close the connection - if "keepalives_count" not in db_options: - db_options["keepalives_count"] = int( - get_setting("INVENTREE_DB_TCP_KEEPALIVES_COUNT", "database.tcp_keepalives_count", "5") - ) - - # # Milliseconds for how long pending data should remain unacked - # by the remote server - # TODO: Supported starting in PSQL 11 - # "tcp_user_timeout": int(os.getenv("PGTCP_USER_TIMEOUT", 
"1000"), - - # Postgres's default isolation level is Read Committed which is - # normally fine, but most developers think the database server is - # actually going to do Serializable type checks on the queries to - # protect against simultaneous changes. - # https://www.postgresql.org/docs/devel/transaction-iso.html - # https://docs.djangoproject.com/en/3.2/ref/databases/#isolation-level - if "isolation_level" not in db_options: - serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) - db_options["isolation_level"] = ISOLATION_LEVEL_SERIALIZABLE if serializable else ISOLATION_LEVEL_READ_COMMITTED - -# Specific options for MySql / MariaDB backend -elif "mysql" in db_engine: # pragma: no cover - # TODO TCP time outs and keepalives - - # MariaDB's default isolation level is Repeatable Read which is - # normally fine, but most developers think the database server is - # actually going to Serializable type checks on the queries to - # protect against siumltaneous changes. - # https://mariadb.com/kb/en/mariadb-transactions-and-isolation-levels-for-sql-server-users/#changing-the-isolation-level - # https://docs.djangoproject.com/en/3.2/ref/databases/#mysql-isolation-level - if "isolation_level" not in db_options: - serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) - db_options["isolation_level"] = "serializable" if serializable else "read committed" - -# Specific options for sqlite backend -elif "sqlite" in db_engine: - # TODO: Verify timeouts are not an issue because no network is involved for SQLite - - # SQLite's default isolation level is Serializable due to SQLite's - # single writer implementation. Presumably as a result of this, it is - # not possible to implement any lower isolation levels in SQLite. 
- # https://www.sqlite.org/isolation.html - pass - -# Provide OPTIONS dict back to the database configuration dict -db_config['OPTIONS'] = db_options - -# Set testing options for the database -db_config['TEST'] = { - 'CHARSET': 'utf8', -} - -# Set collation option for mysql test database -if 'mysql' in db_engine: - db_config['TEST']['COLLATION'] = 'utf8_general_ci' # pragma: no cover - -DATABASES = { - 'default': db_config -} - -# login settings -REMOTE_LOGIN = get_boolean_setting('INVENTREE_REMOTE_LOGIN', 'remote_login_enabled', False) -REMOTE_LOGIN_HEADER = get_setting('INVENTREE_REMOTE_LOGIN_HEADER', 'remote_login_header', 'REMOTE_USER') - -# sentry.io integration for error reporting -SENTRY_ENABLED = get_boolean_setting('INVENTREE_SENTRY_ENABLED', 'sentry_enabled', False) -# Default Sentry DSN (can be overriden if user wants custom sentry integration) -INVENTREE_DSN = 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600' -SENTRY_DSN = get_setting('INVENTREE_SENTRY_DSN', 'sentry_dsn', INVENTREE_DSN) -SENTRY_SAMPLE_RATE = float(get_setting('INVENTREE_SENTRY_SAMPLE_RATE', 'sentry_sample_rate', 0.1)) - -if SENTRY_ENABLED and SENTRY_DSN: # pragma: no cover - sentry_sdk.init( - dsn=SENTRY_DSN, - integrations=[DjangoIntegration(), ], - traces_sample_rate=1.0 if DEBUG else SENTRY_SAMPLE_RATE, - send_default_pii=True - ) - inventree_tags = { - 'testing': TESTING, - 'docker': DOCKER, - 'debug': DEBUG, - 'remote': REMOTE_LOGIN, - } - for key, val in inventree_tags.items(): - sentry_sdk.set_tag(f'inventree_{key}', val) - -# Cache configuration -cache_host = get_setting('INVENTREE_CACHE_HOST', 'cache.host', None) -cache_port = get_setting('INVENTREE_CACHE_PORT', 'cache.port', '6379', typecast=int) - -if cache_host: # pragma: no cover - # We are going to rely upon a possibly non-localhost for our cache, - # so don't wait too long for the cache as nothing in the cache should be - # irreplacable. 
- _cache_options = { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")), - "SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")), - "CONNECTION_POOL_KWARGS": { - "socket_keepalive": config.is_true( - os.getenv("CACHE_TCP_KEEPALIVE", "1") - ), - "socket_keepalive_options": { - socket.TCP_KEEPCNT: int( - os.getenv("CACHE_KEEPALIVES_COUNT", "5") - ), - socket.TCP_KEEPIDLE: int( - os.getenv("CACHE_KEEPALIVES_IDLE", "1") - ), - socket.TCP_KEEPINTVL: int( - os.getenv("CACHE_KEEPALIVES_INTERVAL", "1") - ), - socket.TCP_USER_TIMEOUT: int( - os.getenv("CACHE_TCP_USER_TIMEOUT", "1000") - ), - }, - }, - } - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": f"redis://{cache_host}:{cache_port}/0", - "OPTIONS": _cache_options, - }, - } -else: - CACHES = { - "default": { - "BACKEND": "django.core.cache.backends.locmem.LocMemCache", - }, - } - -_q_worker_timeout = int(get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)) - -# django-q background worker configuration -Q_CLUSTER = { - 'name': 'InvenTree', - 'label': 'Background Tasks', - 'workers': int(get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4)), - 'timeout': _q_worker_timeout, - 'retry': min(120, _q_worker_timeout + 30), - 'max_attempts': int(get_setting('INVENTREE_BACKGROUND_MAX_ATTEMPTS', 'background.max_attempts', 5)), - 'queue_limit': 50, - 'catch_up': False, - 'bulk': 10, - 'orm': 'default', - 'cache': 'default', - 'sync': False, -} - -# Configure django-q sentry integration -if SENTRY_ENABLED and SENTRY_DSN: - Q_CLUSTER['error_reporter'] = { - 'sentry': { - 'dsn': SENTRY_DSN - } - } - -if cache_host: # pragma: no cover - # If using external redis cache, make the cache the broker for Django Q - # as well - Q_CLUSTER["django_redis"] = "worker" - -# database user sessions -SESSION_ENGINE = 'user_sessions.backends.db' -LOGOUT_REDIRECT_URL = 
get_setting('INVENTREE_LOGOUT_REDIRECT_URL', 'logout_redirect_url', 'index') -SILENCED_SYSTEM_CHECKS = [ - 'admin.E410', -] - -# Password validation -# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators - -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', - }, -] - -# Extra (optional) URL validators -# See https://docs.djangoproject.com/en/2.2/ref/validators/#django.core.validators.URLValidator - -EXTRA_URL_SCHEMES = get_setting('INVENTREE_EXTRA_URL_SCHEMES', 'extra_url_schemes', []) - -if type(EXTRA_URL_SCHEMES) not in [list]: # pragma: no cover - logger.warning("extra_url_schemes not correctly formatted") - EXTRA_URL_SCHEMES = [] - -# Internationalization -# https://docs.djangoproject.com/en/dev/topics/i18n/ -LANGUAGE_CODE = get_setting('INVENTREE_LANGUAGE', 'language', 'en-us') -# Store language settings for 30 days -LANGUAGE_COOKIE_AGE = 2592000 - -# If a new language translation is supported, it must be added here -LANGUAGES = [ - ('cs', _('Czech')), - ('da', _('Danish')), - ('de', _('German')), - ('el', _('Greek')), - ('en', _('English')), - ('es', _('Spanish')), - ('es-mx', _('Spanish (Mexican)')), - ('fa', _('Farsi / Persian')), - ('fr', _('French')), - ('he', _('Hebrew')), - ('hu', _('Hungarian')), - ('it', _('Italian')), - ('ja', _('Japanese')), - ('ko', _('Korean')), - ('nl', _('Dutch')), - ('no', _('Norwegian')), - ('pl', _('Polish')), - ('pt', _('Portuguese')), - ('pt-BR', _('Portuguese (Brazilian)')), - ('ru', _('Russian')), - ('sl', _('Slovenian')), - ('sv', _('Swedish')), - ('th', _('Thai')), - ('tr', _('Turkish')), - ('vi', _('Vietnamese')), - ('zh-hans', _('Chinese')), -] - -# Testing interface 
translations -if get_boolean_setting('TEST_TRANSLATIONS', default_value=False): # pragma: no cover - # Set default language - LANGUAGE_CODE = 'xx' - - # Add to language catalog - LANGUAGES.append(('xx', 'Test')) - - # Add custom languages not provided by Django - EXTRA_LANG_INFO = { - 'xx': { - 'code': 'xx', - 'name': 'Test', - 'name_local': 'Test' - }, - } - LANG_INFO = dict(django.conf.locale.LANG_INFO, **EXTRA_LANG_INFO) - django.conf.locale.LANG_INFO = LANG_INFO - -# Currencies available for use -CURRENCIES = get_setting( - 'INVENTREE_CURRENCIES', 'currencies', - ['AUD', 'CAD', 'CNY', 'EUR', 'GBP', 'JPY', 'NZD', 'USD'], - typecast=list, -) - -# Maximum number of decimal places for currency rendering -CURRENCY_DECIMAL_PLACES = 6 - -# Check that each provided currency is supported -for currency in CURRENCIES: - if currency not in moneyed.CURRENCIES: # pragma: no cover - logger.error(f"Currency code '{currency}' is not supported") - sys.exit(1) - -# Custom currency exchange backend -EXCHANGE_BACKEND = 'InvenTree.exchange.InvenTreeExchange' - -# Email configuration options -EMAIL_BACKEND = get_setting('INVENTREE_EMAIL_BACKEND', 'email.backend', 'django.core.mail.backends.smtp.EmailBackend') -EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host', '') -EMAIL_PORT = get_setting('INVENTREE_EMAIL_PORT', 'email.port', 25, typecast=int) -EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username', '') -EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password', '') -EMAIL_SUBJECT_PREFIX = get_setting('INVENTREE_EMAIL_PREFIX', 'email.prefix', '[InvenTree] ') -EMAIL_USE_TLS = get_boolean_setting('INVENTREE_EMAIL_TLS', 'email.tls', False) -EMAIL_USE_SSL = get_boolean_setting('INVENTREE_EMAIL_SSL', 'email.ssl', False) - -DEFAULT_FROM_EMAIL = get_setting('INVENTREE_EMAIL_SENDER', 'email.sender', '') - -EMAIL_USE_LOCALTIME = False -EMAIL_TIMEOUT = 60 - -LOCALE_PATHS = ( - BASE_DIR.joinpath('locale/'), -) - -TIME_ZONE = 
get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC') - -USE_I18N = True - -USE_L10N = True - -# Do not use native timezone support in "test" mode -# It generates a *lot* of cruft in the logs -if not TESTING: - USE_TZ = True # pragma: no cover - -DATE_INPUT_FORMATS = [ - "%Y-%m-%d", -] - -# crispy forms use the bootstrap templates -CRISPY_TEMPLATE_PACK = 'bootstrap4' - -# Use database transactions when importing / exporting data -IMPORT_EXPORT_USE_TRANSACTIONS = True - -SITE_ID = 1 - -# Load the allauth social backends -SOCIAL_BACKENDS = get_setting('INVENTREE_SOCIAL_BACKENDS', 'social_backends', [], typecast=list) - -for app in SOCIAL_BACKENDS: - INSTALLED_APPS.append(app) # pragma: no cover - -SOCIALACCOUNT_PROVIDERS = get_setting('INVENTREE_SOCIAL_PROVIDERS', 'social_providers', None, typecast=dict) - -SOCIALACCOUNT_STORE_TOKENS = True - -# settings for allauth -ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = get_setting('INVENTREE_LOGIN_CONFIRM_DAYS', 'login_confirm_days', 3, typecast=int) -ACCOUNT_LOGIN_ATTEMPTS_LIMIT = get_setting('INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5, typecast=int) -ACCOUNT_DEFAULT_HTTP_PROTOCOL = get_setting('INVENTREE_LOGIN_DEFAULT_HTTP_PROTOCOL', 'login_default_protocol', 'http') -ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True -ACCOUNT_PREVENT_ENUMERATION = True - -# override forms / adapters -ACCOUNT_FORMS = { - 'login': 'allauth.account.forms.LoginForm', - 'signup': 'InvenTree.forms.CustomSignupForm', - 'add_email': 'allauth.account.forms.AddEmailForm', - 'change_password': 'allauth.account.forms.ChangePasswordForm', - 'set_password': 'allauth.account.forms.SetPasswordForm', - 'reset_password': 'allauth.account.forms.ResetPasswordForm', - 'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm', - 'disconnect': 'allauth.socialaccount.forms.DisconnectForm', -} - -SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter' -ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter' - -# Markdownify configuration -# Ref: 
https://django-markdownify.readthedocs.io/en/latest/settings.html - -MARKDOWNIFY = { - 'default': { - 'BLEACH': True, - 'WHITELIST_ATTRS': [ - 'href', - 'src', - 'alt', - ], - 'MARKDOWN_EXTENSIONS': [ - 'markdown.extensions.extra' - ], - 'WHITELIST_TAGS': [ - 'a', - 'abbr', - 'b', - 'blockquote', - 'em', - 'h1', 'h2', 'h3', - 'i', - 'img', - 'li', - 'ol', - 'p', - 'strong', - 'ul', - 'table', - 'thead', - 'tbody', - 'th', - 'tr', - 'td' - ], - } -} - -# Ignore these error typeps for in-database error logging -IGNORED_ERRORS = [ - Http404, - django.core.exceptions.PermissionDenied, -] - -# Maintenance mode -MAINTENANCE_MODE_RETRY_AFTER = 60 -MAINTENANCE_MODE_STATE_BACKEND = 'maintenance_mode.backends.StaticStorageBackend' - -# Are plugins enabled? -PLUGINS_ENABLED = get_boolean_setting('INVENTREE_PLUGINS_ENABLED', 'plugins_enabled', False) - -PLUGIN_FILE = config.get_plugin_file() - -# Plugin test settings -PLUGIN_TESTING = get_setting('INVENTREE_PLUGIN_TESTING', 'PLUGIN_TESTING', TESTING) # Are plugins beeing tested? -PLUGIN_TESTING_SETUP = get_setting('INVENTREE_PLUGIN_TESTING_SETUP', 'PLUGIN_TESTING_SETUP', False) # Load plugins from setup hooks in testing? -PLUGIN_TESTING_EVENTS = False # Flag if events are tested right now -PLUGIN_RETRY = get_setting('INVENTREE_PLUGIN_RETRY', 'PLUGIN_RETRY', 5) # How often should plugin loading be tried? -PLUGIN_FILE_CHECKED = False # Was the plugin file checked? 
- -# User interface customization values -CUSTOM_LOGO = get_custom_file('INVENTREE_CUSTOM_LOGO', 'customize.logo', 'custom logo', lookup_media=True) -CUSTOM_SPLASH = get_custom_file('INVENTREE_CUSTOM_SPLASH', 'customize.splash', 'custom splash') - -CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {}) -if DEBUG: - logger.info("InvenTree running with DEBUG enabled") - -logger.info(f"MEDIA_ROOT: '{MEDIA_ROOT}'") -logger.info(f"STATIC_ROOT: '{STATIC_ROOT}'") diff --git a/sample.env b/sample.env index 9fff491..f1bae5d 100644 --- a/sample.env +++ b/sample.env @@ -38,7 +38,7 @@ INVENTREE_GUNICORN_TIMEOUT=30 INVENTREE_PLUGINS_ENABLED=False # Image tag that should be used -INVENTREE_IMAGE=inventree/inventree:0.10.1 +INVENTREE_IMAGE=inventree/inventree:0.11.3 REDIS_IMAGE=redis:7.0-alpine NGINX_IMAGE=nginxinc/nginx-unprivileged:stable-alpine # Postgres image must match version of pgdump in inventree image From 22166337cb5899468113127232a689f8f8fe34bb Mon Sep 17 00:00:00 2001 From: philipp Date: Sat, 16 Sep 2023 18:08:58 +0200 Subject: [PATCH 04/24] add license to project --- LICENSE | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..2071b23 --- /dev/null +++ b/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. From ea308dd7b00abc0ef95ae0d521649f6c2986d332 Mon Sep 17 00:00:00 2001 From: philipp Date: Sat, 16 Sep 2023 22:20:35 +0200 Subject: [PATCH 05/24] update documentation, rename compose file --- README.md | 26 ++++++++++++++++++++++++-- docker-compose.yml => compose.yml | 0 2 files changed, 24 insertions(+), 2 deletions(-) rename docker-compose.yml => compose.yml (100%) diff --git a/README.md b/README.md index 598fec6..a9f3ffd 100644 --- a/README.md +++ b/README.md @@ -8,15 +8,37 @@ These are the deployment files required to get InvenTree up and running. InvenTr ## Configuration -Copy the `sample.env` into a file named `.env` and make sure to adapt all values to your needs, especially secrets. +Copy the `sample.env` into a file named `.env` and make sure to adapt all values to your needs, especially secrets. Note that the redis cache is disabled by default. ## Installation -In order to run invoke an update or complete the first setup, `make update` is used to pull the latest images and apply all database migrations. +In order to run invoke an update or complete the first setup, the data folder must be created (`mkdir data`) and `make update` is used to pull the latest images and apply all database migrations. The command `make up` can be used to run the setup as a foreground service, `make "up -d"` can be used to run the setup in detached mode. +> Warning: + +When running inside LXC (e.g. 
Proxmox) with ZFS you might need to run add the following to your `/etc/docker/daemon.json` (and restart afterwards): + +```json +{ + "storage-driver": "vfs" +} +``` + +This can be required because Docker does not like ZFS and might have issues to store some layers. Note that using VFS has significant impact on the storage usage (might explode). + ### SSO +The following is an example on configuring SSO using OIDC and Keycloak as IdP. See the [InvenTree SSO docs](https://docs.inventree.org/en/latest/settings/SSO) for more details. + +#### Keycloak + +1. Create a new client (Type: OpenID Connect) +2. Enable Client authentication, Authorization, Standard flow. Disable everything else (explicitly Direct access grant) +3. Set home URL to `https://` and Redirect URL to `https:///accounts/keycloak/login/callback/`. + +#### Inventree + Login as InvenTree admin user. Under `Settings > Login Settings` make sure to `Enable SSO`. diff --git a/docker-compose.yml b/compose.yml similarity index 100% rename from docker-compose.yml rename to compose.yml From 1a89213c8089b2ce1f2df10d2e579d6e3618038f Mon Sep 17 00:00:00 2001 From: philipp Date: Sat, 16 Sep 2023 23:29:04 +0200 Subject: [PATCH 06/24] migrate from keycloak to oidc provider Django AllAuth dropped keycloak in recent release: https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/keycloak.html --- README.md | 9 ++++++--- compose.yml | 19 +++++++++---------- sample.env | 9 ++++----- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index a9f3ffd..7b98dfd 100644 --- a/README.md +++ b/README.md @@ -30,15 +30,18 @@ This can be required because Docker does not like ZFS and might have issues to s ### SSO -The following is an example on configuring SSO using OIDC and Keycloak as IdP. See the [InvenTree SSO docs](https://docs.inventree.org/en/latest/settings/SSO) for more details. +The following is an example on configuring SSO using OIDC and Keycloak as IdP. 
See the [InvenTree SSO docs](https://docs.inventree.org/en/latest/settings/SSO) as well as the [AllAuth social providers](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html) for more details. Note that Keycloak is not a valid provider anymore any OIDC [should be used](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/keycloak.html). #### Keycloak 1. Create a new client (Type: OpenID Connect) 2. Enable Client authentication, Authorization, Standard flow. Disable everything else (explicitly Direct access grant) -3. Set home URL to `https://` and Redirect URL to `https:///accounts/keycloak/login/callback/`. +3. Set home URL to `https://` and Redirect URL to `https:///accounts/oidc/login/callback/`. #### Inventree -Login as InvenTree admin user. Under `Settings > Login Settings` make sure to `Enable SSO`. +Login as InvenTree admin user and open the admin interface. +1. In the Admin interface, add a new group named "default" and click on "Save and edit". Disallow reading the admin settings and allow all other permissions you want a default user to have. +2. In the `Social Accounts` block, add a new Social Application. The provider must be oidc, the name and client id can be arbitrary value. The available default site must be added. +3. Go back to the application. Under `Settings > Login Settings` make sure to `Enable SSO` and `Enable SSO registration`. Also, set `Group on signup` to your default group. 
diff --git a/compose.yml b/compose.yml index 6d6dd69..dc7908c 100644 --- a/compose.yml +++ b/compose.yml @@ -82,17 +82,16 @@ services: environment: INVENTREE_SOCIAL_PROVIDERS: | { - "keycloak": { - "SERVERS": [ - { - "KEYCLOAK_URL": "${HKNG_KEYCLOAK_URL:?You must provide the 'HKNG_KEYCLOAK_URL' variable in the .env file}", - "KEYCLOAK_REALM": "${HKNG_KEYCLOAK_REALM:?You must provide the 'HKNG_KEYCLOAK_REALM' variable in the .env file}", - "APP": { - "client_id": "${HKNG_KEYCLOAK_CLIENT_ID:?You must provide the 'HKNG_KEYCLOAK_CLIENT_ID' variable in the .env file}", - "secret": "${HKNG_KEYCLOAK_CLIENT_SECRET:?You must provide the 'HKNG_KEYCLOAK_CLIENT_SECRET' variable in the .env file}" - } + "openid_connect": { + "SERVERS": [{ + "id": "oidc", + "name": "Hacknang SSO", + "server_url": "${HKNG_OIDC_URL:?You must provide the 'HKNG_OIDC_URL' variable in the .env file}", + "APP": { + "client_id": "${HKNG_OIDC_CLIENT_ID:?You must provide the 'HKNG_OIDC_CLIENT_ID' variable in the .env file}", + "secret": "${HKNG_OIDC_CLIENT_SECRET:?You must provide the 'HKNG_OIDC_CLIENT_SECRET' variable in the .env file}" } - ] + }] } } depends_on: diff --git a/sample.env b/sample.env index f1bae5d..b22915b 100644 --- a/sample.env +++ b/sample.env @@ -60,9 +60,8 @@ INVENTREE_SECRET_KEY=some-secret-key ALLOWED_HOSTS=inventree.example.com,www.inventree.example.com # SSO Config -INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.keycloak +INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.openid_connect -HKNG_KEYCLOAK_URL=https://keycloak.example.com -HKNG_KEYCLOAK_REALM=master -HKNG_KEYCLOAK_CLIENT_ID=example-client -HKNG_KEYCLOAK_CLIENT_SECRET=example-secret +HKNG_OIDC_URL=https://keycloak.example.com/realms/master/.well-known/openid-configuration +HKNG_OIDC_CLIENT_ID=example-client +HKNG_OIDC_SECRET=example-secret From b0b5ab3c53af9a274c80c3fe64848e1fc03e1328 Mon Sep 17 00:00:00 2001 From: Philipp Fruck Date: Sun, 19 May 2024 16:11:04 +0200 Subject: [PATCH 07/24] remove 
crappy volume stuff --- compose.yml | 18 ++++-------------- sample.env | 2 +- 2 files changed, 5 insertions(+), 15 deletions(-) diff --git a/compose.yml b/compose.yml index dc7908c..b5c9c21 100644 --- a/compose.yml +++ b/compose.yml @@ -51,7 +51,7 @@ services: - POSTGRES_DB=${INVENTREE_DB_NAME:?You must provide the 'INVENTREE_DB_NAME' variable in the .env file} volumes: # Map 'data' volume such that postgres database is stored externally - - inventree_data:/var/lib/postgresql/data/:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/var/lib/postgresql/data/:z restart: unless-stopped # redis acts as database cache manager @@ -98,7 +98,7 @@ services: - inventree-db volumes: # Data volume must map to /home/inventree/data - - inventree_data:/home/inventree/data:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z - ./plugins:/home/inventree/InvenTree/plugins:z restart: unless-stopped @@ -114,7 +114,7 @@ services: - .env volumes: # Data volume must map to /home/inventree/data - - inventree_data:/home/inventree/data:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z restart: unless-stopped # nginx acts as a reverse proxy @@ -135,15 +135,5 @@ services: # Refer to the provided example file as a starting point - ./nginx.prod.conf:/etc/nginx/conf.d/default.conf:ro,Z # nginx proxy needs access to static and media files - - inventree_data:/var/www:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/var/www:z restart: unless-stopped - -volumes: - # Persistent data, stored external to the container(s) - inventree_data: - driver: local - driver_opts: - type: none - o: bind - # This directory specified where InvenTree data are stored "outside" the docker containers - device: ${INVENTREE_EXT_VOLUME:?You must specify the 
'INVENTREE_EXT_VOLUME' variable in the .env file!} diff --git a/sample.env b/sample.env index b22915b..1904bc9 100644 --- a/sample.env +++ b/sample.env @@ -5,7 +5,7 @@ COMPOSE_PROJECT_NAME=inventree # Note: You *must* un-comment this line, and point it to a path on your local machine # e.g. Linux -INVENTREE_EXT_VOLUME=data +INVENTREE_EXT_VOLUME=./data # e.g. Windows (docker desktop) #INVENTREE_EXT_VOLUME=c:/Users/me/inventree-data From c1c12cfa76fbf60cda62ee94181e8b0a82358193 Mon Sep 17 00:00:00 2001 From: Philipp Fruck Date: Sun, 19 May 2024 16:14:08 +0200 Subject: [PATCH 08/24] add new INVENTREE_SITE_URL variable --- sample.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sample.env b/sample.env index 1904bc9..40df1e0 100644 --- a/sample.env +++ b/sample.env @@ -57,7 +57,7 @@ INVENTREE_DB_PASSWORD=password # Django configuration INVENTREE_SECRET_KEY=some-secret-key -ALLOWED_HOSTS=inventree.example.com,www.inventree.example.com +INVENTREE_SITE_URL=inventree.example.com,www.inventree.example.com # SSO Config INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.openid_connect From 73383eff43f6bb87555129504c01749c9ecdc401 Mon Sep 17 00:00:00 2001 From: Philipp Fruck Date: Wed, 17 Sep 2025 13:33:03 +0200 Subject: [PATCH 09/24] cleanup deployment --- Makefile | 4 ++-- backup.sh | 6 ++++++ compose.yml | 15 --------------- 3 files changed, 8 insertions(+), 17 deletions(-) create mode 100644 backup.sh diff --git a/Makefile b/Makefile index a152205..11ba6f3 100644 --- a/Makefile +++ b/Makefile @@ -8,9 +8,9 @@ psql: update: read -p "Update will cause downtime of the server. Are you sure you want to continue? Press Ctrl+c to abort!" 
_ - $(COMPOSE) down $(COMPOSE) pull - $(COMPOSE) run inventree-server invoke update + $(COMPOSE) down + $(COMPOSE) run --rm inventree-server invoke update $(COMPOSE) up -d data: # podman does not autocreate data folder diff --git a/backup.sh b/backup.sh new file mode 100644 index 0000000..81ad1e5 --- /dev/null +++ b/backup.sh @@ -0,0 +1,6 @@ +#!/bin/sh +time=$(date +"%Y-%m-%dT%H:%M:%S%z") +dir=backup +[ -d "${dir}" ] || mkdir -p "${dir}" +docker compose run --rm -u postgres inventree-db sh -c 'PGPASSWORD=$POSTGRES_PASSWORD pg_dump -h inventree-db -p 5432 -U $POSTGRES_USER inventree' > "${dir}/${time}.sql" +# to restore: pg_restore -d newdb db.dump diff --git a/compose.yml b/compose.yml index b5c9c21..efafa65 100644 --- a/compose.yml +++ b/compose.yml @@ -79,21 +79,6 @@ services: - 8000 env_file: - .env - environment: - INVENTREE_SOCIAL_PROVIDERS: | - { - "openid_connect": { - "SERVERS": [{ - "id": "oidc", - "name": "Hacknang SSO", - "server_url": "${HKNG_OIDC_URL:?You must provide the 'HKNG_OIDC_URL' variable in the .env file}", - "APP": { - "client_id": "${HKNG_OIDC_CLIENT_ID:?You must provide the 'HKNG_OIDC_CLIENT_ID' variable in the .env file}", - "secret": "${HKNG_OIDC_CLIENT_SECRET:?You must provide the 'HKNG_OIDC_CLIENT_SECRET' variable in the .env file}" - } - }] - } - } depends_on: - inventree-db volumes: From f310a238d5b8382911da1933147a159b877a3698 Mon Sep 17 00:00:00 2001 From: Philipp Fruck Date: Wed, 17 Sep 2025 14:26:21 +0200 Subject: [PATCH 10/24] adhere to upstream compose file for v1.0.0 --- Caddyfile | 77 ++++++++++++++++++++++++++++++++++++++++++ compose.yml | 72 +++++++++++++++++++-------------------- nginx.prod.conf | 64 ----------------------------------- sample.env | 90 +++++++++++++++++++++---------------------------- 4 files changed, 151 insertions(+), 152 deletions(-) create mode 100644 Caddyfile delete mode 100644 nginx.prod.conf diff --git a/Caddyfile b/Caddyfile new file mode 100644 index 0000000..23268c6 --- /dev/null +++ b/Caddyfile 
@@ -0,0 +1,77 @@ +# Example Caddyfile for InvenTree +# The following environment variables may be used: +# - INVENTREE_SITE_URL: The upstream URL of the InvenTree site (default: inventree.localhost) +# - INVENTREE_SERVER: The internal URL of the InvenTree container (default: http://inventree-server:8000) +# +# Note that while this file is a good starting point, it may need to be modified to suit your specific requirements +# +# Ref to the Caddyfile documentation: https://caddyserver.com/docs/caddyfile + + +# Logging configuration for Caddy +(log_common) { + log { + output file /var/log/caddy/{args[0]}.access.log + } +} + +# CORS headers control (used for static and media files) +(cors-headers) { + header Allow GET,HEAD,OPTIONS + header Access-Control-Allow-Origin * + header Access-Control-Allow-Methods GET,HEAD,OPTIONS + header Access-Control-Allow-Headers Authorization,Content-Type,User-Agent + + @cors_preflight{args[0]} method OPTIONS + + handle @cors_preflight{args[0]} { + respond "" 204 + } +} + +# The default server address is configured in the .env file +# If not specified, the default address is used - http://inventree.localhost +# If you need to listen on multiple addresses, or use a different port, you can modify this section directly +http://inventree.ctbk.de { + import log_common inventree + + encode gzip + + request_body { + max_size 100MB + } + + # Handle static request files + handle_path /static/* { + import cors-headers static + + root * /var/www/static + file_server + } + + # Handle media request files + handle_path /media/* { + import cors-headers media + + root * /var/www/media + file_server + + # Force download of media files (for security) + # Comment out this line if you do not want to force download + header Content-Disposition attachment + + # Authentication is handled by the forward_auth directive + # This is required to ensure that media files are only accessible to authenticated users + forward_auth 
{$INVENTREE_SERVER:"http://inventree-server:8000"} { + uri /auth/ + } + } + + # All other requests are proxied to the InvenTree server + reverse_proxy {$INVENTREE_SERVER:"http://inventree-server:8000"} { + + # If you are running behind another proxy, you may need to specify 'trusted_proxies' + # Ref: https://caddyserver.com/docs/json/apps/http/servers/trusted_proxies/ + # trusted_proxies ... + } +} diff --git a/compose.yml b/compose.yml index efafa65..f5af5c6 100644 --- a/compose.yml +++ b/compose.yml @@ -1,10 +1,8 @@ -version: "3.8" - # Docker compose recipe for a production-ready InvenTree setup, with the following containers: # - PostgreSQL as the database backend # - gunicorn as the InvenTree web server # - django-q as the InvenTree background worker process -# - nginx as a reverse proxy +# - Caddy as a reverse proxy # - redis as the cache manager (optional, disabled by default) # --------------------- @@ -34,16 +32,20 @@ version: "3.8" # INVENTREE_TAG=0.7.5 # +# ---------------------------- +# Docker compose customization +# ---------------------------- +# If you wish to customize the docker-compose script, you should only do so if you understand the stack! +# Do not expect support for customizations that are not part of the standard InvenTree setup! 
+ services: # Database service # Use PostgreSQL as the database backend inventree-db: + image: postgres:13 container_name: inventree-db - image: ${POSTGRES_IMAGE:?You must provide the 'POSTGRES_IMAGE' variable in the .env file} expose: - ${INVENTREE_DB_PORT:-5432}/tcp - env_file: - - .env environment: - PGDATA=/var/lib/postgresql/data/pgdb - POSTGRES_USER=${INVENTREE_DB_USER:?You must provide the 'INVENTREE_DB_USER' variable in the .env file} @@ -55,43 +57,39 @@ services: restart: unless-stopped # redis acts as database cache manager - # only runs under the "redis" profile : https://docs.docker.com/compose/profiles/ inventree-cache: + image: redis:7-alpine container_name: inventree-cache - image: ${REDIS_IMAGE:?You must provide the 'REDIS_IMAGE' variable in the .env file} - depends_on: - - inventree-db env_file: - .env - profiles: - - redis expose: - - ${INVENTREE_CACHE_PORT:-6379} + - ${INVENTREE_CACHE_PORT:-6379} restart: always # InvenTree web server service # Uses gunicorn as the web server inventree-server: - container_name: inventree-server # If you wish to specify a particular InvenTree version, do so here - image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file} + image: inventree/inventree:${INVENTREE_TAG:-stable} + container_name: inventree-server + # Only change this port if you understand the stack. 
expose: - - 8000 - env_file: - - .env + - 8000 depends_on: - inventree-db + - inventree-cache + env_file: + - .env volumes: # Data volume must map to /home/inventree/data - - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z - - ./plugins:/home/inventree/InvenTree/plugins:z + - ${INVENTREE_EXT_VOLUME}:/home/inventree/data:z restart: unless-stopped # Background worker process handles long-running or periodic tasks inventree-worker: - container_name: inventree-worker # If you wish to specify a particular InvenTree version, do so here - image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file} + image: inventree/inventree:${INVENTREE_TAG:-stable} + container_name: inventree-worker command: invoke worker depends_on: - inventree-server @@ -99,26 +97,26 @@ services: - .env volumes: # Data volume must map to /home/inventree/data - - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z + - ${INVENTREE_EXT_VOLUME}:/home/inventree/data:z restart: unless-stopped - # nginx acts as a reverse proxy - # static files are served directly by nginx - # media files are served by nginx, although authentication is redirected to inventree-server - # web requests are redirected to gunicorn - # NOTE: You will need to provide a working nginx.conf file! 
+ # caddy acts as reverse proxy and static file server + # https://hub.docker.com/_/caddy inventree-proxy: container_name: inventree-proxy - image: ${NGINX_IMAGE:?You must provide the 'NGINX_IMAGE' variable in the .env file} + image: caddy:alpine + restart: always depends_on: - inventree-server ports: - # Default web port is 1337 (can be changed in the .env file) - - ${INVENTREE_WEB_PORT:-1337}:8080 + - ${INVENTREE_WEB_PORT:-80}:80 + - 443:443 + env_file: + - .env volumes: - # Provide nginx configuration file to the container - # Refer to the provided example file as a starting point - - ./nginx.prod.conf:/etc/nginx/conf.d/default.conf:ro,Z - # nginx proxy needs access to static and media files - - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/var/www:z - restart: unless-stopped + - ./Caddyfile:/etc/caddy/Caddyfile:ro,z + - ${INVENTREE_EXT_VOLUME}/static:/var/www/static:z + - ${INVENTREE_EXT_VOLUME}/media:/var/www/media:z + - ${INVENTREE_EXT_VOLUME}:/var/log:z + - ${INVENTREE_EXT_VOLUME}:/data:z + - ${INVENTREE_EXT_VOLUME}:/config:z diff --git a/nginx.prod.conf b/nginx.prod.conf deleted file mode 100644 index 1ebdcd2..0000000 --- a/nginx.prod.conf +++ /dev/null @@ -1,64 +0,0 @@ -server { - - # Listen for connection on (internal) port 8080 (unprivileged nginx) - listen 8080; - - real_ip_header proxy_protocol; - - location / { - - proxy_set_header Host $http_host; - proxy_set_header X-Forwarded-By $server_addr:$server_port; - proxy_set_header X-Forwarded-For $remote_addr; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header CLIENT_IP $remote_addr; - - proxy_pass_request_headers on; - - proxy_redirect off; - - client_max_body_size 100M; - - proxy_buffering off; - proxy_request_buffering off; - - # Change 'inventree-server' to the name of the inventree server container, - # and '8000' to the INVENTREE_WEB_PORT (if not default) - proxy_pass 
http://inventree-server:8000; - } - - # Redirect any requests for static files - location /static/ { - alias /var/www/static/; - autoindex on; - - # Caching settings - expires 30d; - add_header Pragma public; - add_header Cache-Control "public"; - } - - # Redirect any requests for media files - location /media/ { - alias /var/www/media/; - - # Media files require user authentication - auth_request /auth; - - # Content header to force download - add_header Content-disposition "attachment"; - } - - # Use the 'user' API endpoint for auth - location /auth { - internal; - - proxy_pass http://inventree-server:8000/auth/; - - proxy_pass_request_body off; - proxy_set_header Content-Length ""; - proxy_set_header X-Original-URI $request_uri; - } - -} diff --git a/sample.env b/sample.env index 40df1e0..859f58e 100644 --- a/sample.env +++ b/sample.env @@ -1,67 +1,55 @@ -# InvenTree environment variables for a postgresql production setup +# InvenTree environment variables for docker compose deployment +# For a full list of the available configuration options, refer to the InvenTree documentation: +# https://docs.inventree.org/en/stable/start/config/ + +# Specify the name of the docker-compose project COMPOSE_PROJECT_NAME=inventree -# Location of persistent database data (stored external to the docker containers) -# Note: You *must* un-comment this line, and point it to a path on your local machine +# InvenTree version tag (e.g. 'stable' / 'latest' / 'x.x.x') +INVENTREE_TAG=stable -# e.g. Linux -INVENTREE_EXT_VOLUME=./data +# InvenTree server URL - update this to match your server URL +INVENTREE_SITE_URL="http://inventree.localhost" +#INVENTREE_SITE_URL="http://192.168.1.2" # You can specify a local IP address here +#INVENTREE_SITE_URL="https://inventree.my-domain.com" # Or a public domain name (which you control) -# e.g. 
Windows (docker desktop) -#INVENTREE_EXT_VOLUME=c:/Users/me/inventree-data - -# Default web port for the InvenTree server -INVENTREE_WEB_PORT=8080 +# Specify the location of the external data volume +# By default, placed in local directory 'inventree-data' +INVENTREE_EXT_VOLUME=./inventree-data # Ensure debug is false for a production setup -INVENTREE_DEBUG=False INVENTREE_LOG_LEVEL=WARNING +# Enable custom plugins? +INVENTREE_PLUGINS_ENABLED=True + +# Run migrations automatically? +INVENTREE_AUTO_UPDATE=True + +# InvenTree superuser account details +# Un-comment (and complete) these lines to auto-create an admin account +#INVENTREE_ADMIN_USER= +#INVENTREE_ADMIN_PASSWORD= +#INVENTREE_ADMIN_EMAIL= + # Database configuration options -# Note: The example setup is for a PostgreSQL database +# DO NOT CHANGE THESE SETTINGS (unless you really know what you are doing) INVENTREE_DB_ENGINE=postgresql INVENTREE_DB_NAME=inventree INVENTREE_DB_HOST=inventree-db INVENTREE_DB_PORT=5432 -# Redis cache setup (disabled by default) -# Un-comment the following lines to enable Redis cache -# Note that you will also have to run docker-compose with the --profile redis command -# Refer to settings.py for other cache options -#INVENTREE_CACHE_HOST=inventree-cache -#INVENTREE_CACHE_PORT=6379 +# Database credentials - These should be changed from the default values! +# Note: These are *NOT* the InvenTree server login credentials, +# they are the credentials for the PostgreSQL database +INVENTREE_DB_USER=pguser +INVENTREE_DB_PASSWORD=pgpassword + +# Redis cache setup +# Refer to the documentation for other cache options +INVENTREE_CACHE_ENABLED=True +INVENTREE_CACHE_HOST=inventree-cache +INVENTREE_CACHE_PORT=6379 # Options for gunicorn server -INVENTREE_GUNICORN_TIMEOUT=30 - -# Enable custom plugins? 
-INVENTREE_PLUGINS_ENABLED=False - -# Image tag that should be used -INVENTREE_IMAGE=inventree/inventree:0.11.3 -REDIS_IMAGE=redis:7.0-alpine -NGINX_IMAGE=nginxinc/nginx-unprivileged:stable-alpine -# Postgres image must match version of pgdump in inventree image -POSTGRES_IMAGE=postgres:13-alpine - -# InvenTree admin account details -# make sure to use secure credentials these lines to auto-create an admin acount -INVENTREE_ADMIN_USER=admin -INVENTREE_ADMIN_PASSWORD=password -INVENTREE_ADMIN_EMAIL=admin@inventree.example - -# Database credentials - These must be configured before running -# Change from the default values! -INVENTREE_DB_USER=inventree -INVENTREE_DB_PASSWORD=password - -# Django configuration -INVENTREE_SECRET_KEY=some-secret-key -INVENTREE_SITE_URL=inventree.example.com,www.inventree.example.com - -# SSO Config -INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.openid_connect - -HKNG_OIDC_URL=https://keycloak.example.com/realms/master/.well-known/openid-configuration -HKNG_OIDC_CLIENT_ID=example-client -HKNG_OIDC_SECRET=example-secret +INVENTREE_GUNICORN_TIMEOUT=90 From 1b130a48ef723fdbf24251a451d74af0ec953bcc Mon Sep 17 00:00:00 2001 From: Philipp Fruck Date: Wed, 17 Sep 2025 14:40:23 +0200 Subject: [PATCH 11/24] upgrade postgres to 17 --- compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yml b/compose.yml index f5af5c6..ff4f7a4 100644 --- a/compose.yml +++ b/compose.yml @@ -42,7 +42,7 @@ services: # Database service # Use PostgreSQL as the database backend inventree-db: - image: postgres:13 + image: postgres:17 container_name: inventree-db expose: - ${INVENTREE_DB_PORT:-5432}/tcp From 85f5d6c4ab7854192031b6670705525682f9f3b6 Mon Sep 17 00:00:00 2001 From: Philipp Fruck Date: Wed, 17 Sep 2025 14:50:00 +0200 Subject: [PATCH 12/24] switch to alpine version of postgres --- compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yml b/compose.yml index ff4f7a4..8f80693 100644 --- 
a/compose.yml +++ b/compose.yml @@ -42,7 +42,7 @@ services: # Database service # Use PostgreSQL as the database backend inventree-db: - image: postgres:17 + image: postgres:17-alpine container_name: inventree-db expose: - ${INVENTREE_DB_PORT:-5432}/tcp From c78c73c8c21f1a6ad89ec9a7170f6f9030be5ec6 Mon Sep 17 00:00:00 2001 From: Philipp Date: Sun, 19 Mar 2023 18:39:53 +0100 Subject: [PATCH 13/24] initial dump of deployment files --- .gitignore | 2 + Makefile | 24 ++ README.md | 22 ++ docker-compose.yml | 152 ++++++++ nginx.prod.conf | 64 ++++ patch/config.py | 347 +++++++++++++++++ patch/settings.py | 914 +++++++++++++++++++++++++++++++++++++++++++++ sample.env | 68 ++++ 8 files changed, 1593 insertions(+) create mode 100644 .gitignore create mode 100644 Makefile create mode 100644 README.md create mode 100644 docker-compose.yml create mode 100644 nginx.prod.conf create mode 100644 patch/config.py create mode 100644 patch/settings.py create mode 100644 sample.env diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6ee0103 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/data +/.env diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..4abc95c --- /dev/null +++ b/Makefile @@ -0,0 +1,24 @@ +CONFIG_FILE = conf/config.env +SECRET_FILE = conf/secrets.env + +COMPOSE_CMD ?= docker compose +# use unix:///run/docker.sock for docker socket, unix://${XDG_RUNTIME_DIR}/podman/podman.sock for podman +DOCKER_HOST ?= unix:///run/docker.sock +COMPOSE = DOCKER_HOST=$(DOCKER_HOST) $(COMPOSE_CMD) + +psql: + $(COMPOSE) exec inventree-db sh -c 'psql $$POSTGRES_USER $$POSTGRES_DB' + +update: + read -p "Update will cause downtime of the server. Are you sure you want to continue? Press Ctrl+c to abort!" 
_ + $(COMPOSE) down + $(COMPOSE) pull + $(COMPOSE) run inventree-server invoke update + $(COMPOSE) up -d + +data: # podman does not autocreate data folder + mkdir data + +# pass all commands to compose cli +%: data + $(COMPOSE) $@ diff --git a/README.md b/README.md new file mode 100644 index 0000000..598fec6 --- /dev/null +++ b/README.md @@ -0,0 +1,22 @@ +# InvenTree Deployment + +These are the deployment files required to get InvenTree up and running. InvenTree is deployed as a `docker compose` setup and therefore has the following dependencies: + +- Podman/Docker +- Docker Compose +- Make (as script runner) + +## Configuration + +Copy the `sample.env` into a file named `.env` and make sure to adapt all values to your needs, especially secrets. + +## Installation + +In order to run invoke an update or complete the first setup, `make update` is used to pull the latest images and apply all database migrations. + +The command `make up` can be used to run the setup as a foreground service, `make "up -d"` can be used to run the setup in detached mode. + +### SSO + +Login as InvenTree admin user. Under `Settings > Login Settings` make sure to `Enable SSO`. + diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..c7957b7 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,152 @@ +version: "3.8" + +# Docker compose recipe for a production-ready InvenTree setup, with the following containers: +# - PostgreSQL as the database backend +# - gunicorn as the InvenTree web server +# - django-q as the InvenTree background worker process +# - nginx as a reverse proxy +# - redis as the cache manager (optional, disabled by default) + +# --------------------- +# READ BEFORE STARTING! +# --------------------- + +# ----------------------------- +# Setting environment variables +# ----------------------------- +# Shared environment variables should be stored in the .env file +# Changes made to this file are reflected across all containers! 
+# +# IMPORTANT NOTE: +# You should not have to change *anything* within this docker-compose.yml file! +# Instead, make any changes in the .env file! + +# ------------------------ +# InvenTree Image Versions +# ------------------------ +# By default, this docker-compose script targets the STABLE version of InvenTree, +# image: inventree/inventree:stable +# +# To run the LATEST (development) version of InvenTree, +# change the INVENTREE_TAG variable (in the .env file) to "latest" +# +# Alternatively, you could target a specific tagged release version with (for example): +# INVENTREE_TAG=0.7.5 +# + +services: + # Database service + # Use PostgreSQL as the database backend + inventree-db: + container_name: inventree-db + image: ${POSTGRES_IMAGE:?You must provide the 'POSTGRES_IMAGE' variable in the .env file} + expose: + - ${INVENTREE_DB_PORT:-5432}/tcp + env_file: + - .env + environment: + - PGDATA=/var/lib/postgresql/data/pgdb + - POSTGRES_USER=${INVENTREE_DB_USER:?You must provide the 'INVENTREE_DB_USER' variable in the .env file} + - POSTGRES_PASSWORD=${INVENTREE_DB_PASSWORD:?You must provide the 'INVENTREE_DB_PASSWORD' variable in the .env file} + - POSTGRES_DB=${INVENTREE_DB_NAME:?You must provide the 'INVENTREE_DB_NAME' variable in the .env file} + volumes: + # Map 'data' volume such that postgres database is stored externally + - inventree_data:/var/lib/postgresql/data/:z + restart: unless-stopped + + # redis acts as database cache manager + # only runs under the "redis" profile : https://docs.docker.com/compose/profiles/ + inventree-cache: + container_name: inventree-cache + image: ${REDIS_IMAGE:?You must provide the 'REDIS_IMAGE' variable in the .env file} + depends_on: + - inventree-db + env_file: + - .env + profiles: + - redis + expose: + - ${INVENTREE_CACHE_PORT:-6379} + restart: always + + # InvenTree web server service + # Uses gunicorn as the web server + inventree-server: + container_name: inventree-server + # If you wish to specify a particular 
InvenTree version, do so here + image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file} + expose: + - 8000 + env_file: + - .env + environment: + INVENTREE_SOCIAL_PROVIDERS: | + { + "keycloak": { + "SERVERS": [ + { + "KEYCLOAK_URL": "${HKNG_KEYCLOAK_URL:?You must provide the 'HKNG_KEYCLOAK_URL' variable in the .env file}", + "KEYCLOAK_REALM": "${HKNG_KEYCLOAK_REALM:?You must provide the 'HKNG_KEYCLOAK_REALM' variable in the .env file}", + "APP": { + "client_id": "${HKNG_KEYCLOAK_CLIENT_ID:?You must provide the 'HKNG_KEYCLOAK_CLIENT_ID' variable in the .env file}", + "secret": "${HKNG_KEYCLOAK_CLIENT_SECRET:?You must provide the 'HKNG_KEYCLOAK_CLIENT_SECRET' variable in the .env file}" + } + } + ] + } + } + depends_on: + - inventree-db + volumes: + # Data volume must map to /home/inventree/data + - inventree_data:/home/inventree/data:z + # ugly backport of 0.11.0 features, to be removed + - ./patch/settings.py:/home/inventree/InvenTree/InvenTree/settings.py:ro,Z + - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:zo,Z + restart: unless-stopped + + # Background worker process handles long-running or periodic tasks + inventree-worker: + container_name: inventree-worker + # If you wish to specify a particular InvenTree version, do so here + image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file} + command: invoke worker + depends_on: + - inventree-server + env_file: + - .env + volumes: + # Data volume must map to /home/inventree/data + - inventree_data:/home/inventree/data:z + restart: unless-stopped + + # nginx acts as a reverse proxy + # static files are served directly by nginx + # media files are served by nginx, although authentication is redirected to inventree-server + # web requests are redirected to gunicorn + # NOTE: You will need to provide a working nginx.conf file! 
+  inventree-proxy:
+    container_name: inventree-proxy
+    image: ${NGINX_IMAGE:?You must provide the 'NGINX_IMAGE' variable in the .env file}
+    depends_on:
+      - inventree-server
+    ports:
+      # Default web port is 1337 (can be changed in the .env file)
+      - "${INVENTREE_WEB_PORT:-1337}:8080"
+    volumes:
+      # Provide nginx configuration file to the container
+      # Refer to the provided example file as a starting point
+      - ./nginx.prod.conf:/etc/nginx/conf.d/default.conf:ro,Z
+      # nginx proxy needs access to static and media files
+      - inventree_data:/var/www:z
+    restart: unless-stopped
+
+volumes:
+  # Persistent data, stored external to the container(s)
+  inventree_data:
+    driver: local
+    driver_opts:
+      type: none
+      o: bind
+      # This directory specifies where InvenTree data are stored "outside" the docker containers
+      device: ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}
diff --git a/nginx.prod.conf b/nginx.prod.conf
new file mode 100644
index 0000000..1ebdcd2
--- /dev/null
+++ b/nginx.prod.conf
@@ -0,0 +1,64 @@
+server {
+
+    # Listen for connection on (internal) port 8080 (unprivileged nginx)
+    listen 8080;
+
+    real_ip_header proxy_protocol;
+
+    location / {
+
+        proxy_set_header Host $http_host;
+        proxy_set_header X-Forwarded-By $server_addr:$server_port;
+        proxy_set_header X-Forwarded-For $remote_addr;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header CLIENT_IP $remote_addr;
+
+        proxy_pass_request_headers on;
+
+        proxy_redirect off;
+
+        client_max_body_size 100M;
+
+        proxy_buffering off;
+        proxy_request_buffering off;
+
+        # Change 'inventree-server' to the name of the inventree server container,
+        # and '8000' to the INVENTREE_WEB_PORT (if not default)
+        proxy_pass http://inventree-server:8000;
+    }
+
+    # Redirect any requests for static files
+    location /static/ {
+        alias /var/www/static/;
+        autoindex on;
+
+        # Caching settings
+        expires 30d;
+        add_header Pragma public;
+
add_header Cache-Control "public"; + } + + # Redirect any requests for media files + location /media/ { + alias /var/www/media/; + + # Media files require user authentication + auth_request /auth; + + # Content header to force download + add_header Content-disposition "attachment"; + } + + # Use the 'user' API endpoint for auth + location /auth { + internal; + + proxy_pass http://inventree-server:8000/auth/; + + proxy_pass_request_body off; + proxy_set_header Content-Length ""; + proxy_set_header X-Original-URI $request_uri; + } + +} diff --git a/patch/config.py b/patch/config.py new file mode 100644 index 0000000..720e7bd --- /dev/null +++ b/patch/config.py @@ -0,0 +1,347 @@ +"""Helper functions for loading InvenTree configuration options.""" + +import datetime +import json +import logging +import os +import random +import shutil +import string +from pathlib import Path + +logger = logging.getLogger('inventree') +CONFIG_DATA = None +CONFIG_LOOKUPS = {} + + +def to_list(value, delimiter=','): + """Take a configuration setting and make sure it is a list. + + For example, we might have a configuration setting taken from the .config file, + which is already a list. + + However, the same setting may be specified via an environment variable, + using a comma delimited string! + """ + + if type(value) in [list, tuple]: + return value + + # Otherwise, force string value + value = str(value) + + return [x.strip() for x in value.split(delimiter)] + + +def to_dict(value): + """Take a configuration setting and make sure it is a dict. + + For example, we might have a configuration setting taken from the .config file, + which is already an object/dict. + + However, the same setting may be specified via an environment variable, + using a valid JSON string! + """ + if value is None: + return {} + + if type(value) == dict: + return value + + try: + return json.loads(value) + except Exception as error: + logger.error(f"Failed to parse value '{value}' as JSON with error {error}. 
Ensure value is a valid JSON string.") + return {} + + +def is_true(x): + """Shortcut function to determine if a value "looks" like a boolean""" + return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on'] + + +def get_base_dir() -> Path: + """Returns the base (top-level) InvenTree directory.""" + return Path(__file__).parent.parent.resolve() + + +def ensure_dir(path: Path) -> None: + """Ensure that a directory exists. + + If it does not exist, create it. + """ + + if not path.exists(): + path.mkdir(parents=True, exist_ok=True) + + +def get_config_file(create=True) -> Path: + """Returns the path of the InvenTree configuration file. + + Note: It will be created it if does not already exist! + """ + base_dir = get_base_dir() + + cfg_filename = os.getenv('INVENTREE_CONFIG_FILE') + + if cfg_filename: + cfg_filename = Path(cfg_filename.strip()).resolve() + else: + # Config file is *not* specified - use the default + cfg_filename = base_dir.joinpath('config.yaml').resolve() + + if not cfg_filename.exists() and create: + print("InvenTree configuration file 'config.yaml' not found - creating default file") + ensure_dir(cfg_filename.parent) + + cfg_template = base_dir.joinpath("config_template.yaml") + shutil.copyfile(cfg_template, cfg_filename) + print(f"Created config file {cfg_filename}") + + return cfg_filename + + +def load_config_data(set_cache: bool = False) -> map: + """Load configuration data from the config file. + + Arguments: + set_cache(bool): If True, the configuration data will be cached for future use after load. 
+ """ + global CONFIG_DATA + + # use cache if populated + # skip cache if cache should be set + if CONFIG_DATA is not None and not set_cache: + return CONFIG_DATA + + import yaml + + cfg_file = get_config_file() + + with open(cfg_file, 'r') as cfg: + data = yaml.safe_load(cfg) + + # Set the cache if requested + if set_cache: + CONFIG_DATA = data + + return data + + +def get_setting(env_var=None, config_key=None, default_value=None, typecast=None): + """Helper function for retrieving a configuration setting value. + + - First preference is to look for the environment variable + - Second preference is to look for the value of the settings file + - Third preference is the default value + + Arguments: + env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT' + config_key: Key to lookup in the configuration file + default_value: Value to return if first two options are not provided + typecast: Function to use for typecasting the value + """ + def try_typecasting(value, source: str): + """Attempt to typecast the value""" + + # Force 'list' of strings + if typecast is list: + value = to_list(value) + + # Valid JSON string is required + elif typecast is dict: + value = to_dict(value) + + elif typecast is not None: + # Try to typecast the value + try: + val = typecast(value) + set_metadata(source) + return val + except Exception as error: + logger.error(f"Failed to typecast '{env_var}' with value '{value}' to type '{typecast}' with error {error}") + + set_metadata(source) + return value + + def set_metadata(source: str): + """Set lookup metadata for the setting.""" + key = env_var or config_key + CONFIG_LOOKUPS[key] = {'env_var': env_var, 'config_key': config_key, 'source': source, 'accessed': datetime.datetime.now()} + + # First, try to load from the environment variables + if env_var is not None: + val = os.getenv(env_var, None) + + if val is not None: + return try_typecasting(val, 'env') + + # Next, try to load from configuration file + if config_key is 
not None: + cfg_data = load_config_data() + + result = None + + # Hack to allow 'path traversal' in configuration file + for key in config_key.strip().split('.'): + + if type(cfg_data) is not dict or key not in cfg_data: + result = None + break + + result = cfg_data[key] + cfg_data = cfg_data[key] + + if result is not None: + return try_typecasting(result, 'yaml') + + # Finally, return the default value + return try_typecasting(default_value, 'default') + + +def get_boolean_setting(env_var=None, config_key=None, default_value=False): + """Helper function for retreiving a boolean configuration setting""" + + return is_true(get_setting(env_var, config_key, default_value)) + + +def get_media_dir(create=True): + """Return the absolute path for the 'media' directory (where uploaded files are stored)""" + + md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root') + + if not md: + raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified') + + md = Path(md).resolve() + + if create: + md.mkdir(parents=True, exist_ok=True) + + return md + + +def get_static_dir(create=True): + """Return the absolute path for the 'static' directory (where static files are stored)""" + + sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root') + + if not sd: + raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified') + + sd = Path(sd).resolve() + + if create: + sd.mkdir(parents=True, exist_ok=True) + + return sd + + +def get_backup_dir(create=True): + """Return the absolute path for the backup directory""" + + bd = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir') + + if not bd: + raise FileNotFoundError('INVENTREE_BACKUP_DIR not specified') + + bd = Path(bd).resolve() + + if create: + bd.mkdir(parents=True, exist_ok=True) + + return bd + + +def get_plugin_file(): + """Returns the path of the InvenTree plugins specification file. + + Note: It will be created if it does not already exist! 
+ """ + + # Check if the plugin.txt file (specifying required plugins) is specified + plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file') + + if not plugin_file: + # If not specified, look in the same directory as the configuration file + config_dir = get_config_file().parent + plugin_file = config_dir.joinpath('plugins.txt') + else: + # Make sure we are using a modern Path object + plugin_file = Path(plugin_file) + + if not plugin_file.exists(): + logger.warning("Plugin configuration file does not exist - creating default file") + logger.info(f"Creating plugin file at '{plugin_file}'") + ensure_dir(plugin_file.parent) + + # If opening the file fails (no write permission, for example), then this will throw an error + plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n") + + return plugin_file + + +def get_secret_key(): + """Return the secret key value which will be used by django. + + Following options are tested, in descending order of preference: + + A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data + B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file + C) Look for default key file "secret_key.txt" + D) Create "secret_key.txt" if it does not exist + """ + + # Look for environment variable + if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'): + logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover + return secret_key + + # Look for secret key file + if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'): + secret_key_file = Path(secret_key_file).resolve() + else: + # Default location for secret key file + secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve() + + if not secret_key_file.exists(): + logger.info(f"Generating random key file at '{secret_key_file}'") + ensure_dir(secret_key_file.parent) + + # Create a random key file + options = string.digits + string.ascii_letters + 
string.punctuation + key = ''.join([random.choice(options) for i in range(100)]) + secret_key_file.write_text(key) + + logger.info(f"Loading SECRET_KEY from '{secret_key_file}'") + + key_data = secret_key_file.read_text().strip() + + return key_data + + +def get_custom_file(env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False): + """Returns the checked path to a custom file. + + Set lookup_media to True to also search in the media folder. + """ + from django.contrib.staticfiles.storage import StaticFilesStorage + from django.core.files.storage import default_storage + + value = get_setting(env_ref, conf_ref, None) + + if not value: + return None + + static_storage = StaticFilesStorage() + + if static_storage.exists(value): + logger.info(f"Loading {log_ref} from static directory: {value}") + elif lookup_media and default_storage.exists(value): + logger.info(f"Loading {log_ref} from media directory: {value}") + else: + add_dir_str = ' or media' if lookup_media else '' + logger.warning(f"The {log_ref} file '{value}' could not be found in the static{add_dir_str} directories") + value = False + + return value diff --git a/patch/settings.py b/patch/settings.py new file mode 100644 index 0000000..1aab68d --- /dev/null +++ b/patch/settings.py @@ -0,0 +1,914 @@ +"""Django settings for InvenTree project. + +In practice the settings in this file should not be adjusted, +instead settings can be configured in the config.yaml file +located in the top level project directory. + +This allows implementation configuration to be hidden from source control, +as well as separate configuration parameters from the more complex +database setup in this file. 
+""" + +import logging +import os +import socket +import sys +from pathlib import Path + +import django.conf.locale +import django.core.exceptions +from django.http import Http404 +from django.utils.translation import gettext_lazy as _ + +import moneyed +import sentry_sdk +from sentry_sdk.integrations.django import DjangoIntegration + +from . import config +from .config import get_boolean_setting, get_custom_file, get_setting + +INVENTREE_NEWS_URL = 'https://inventree.org/news/feed.atom' + +# Determine if we are running in "test" mode e.g. "manage.py test" +TESTING = 'test' in sys.argv + +if TESTING: + + # Use a weaker password hasher for testing (improves testing speed) + PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher',] + + # Enable slow-test-runner + TEST_RUNNER = 'django_slowtests.testrunner.DiscoverSlowestTestsRunner' + NUM_SLOW_TESTS = 25 + + # Note: The following fix is "required" for docker build workflow + # Note: 2022-12-12 still unsure why... + if os.getenv('INVENTREE_DOCKER'): + # Ensure that sys.path includes global python libs + site_packages = '/usr/local/lib/python3.9/site-packages' + + if site_packages not in sys.path: + print("Adding missing site-packages path:", site_packages) + sys.path.append(site_packages) + +# Are environment variables manipulated by tests? Needs to be set by testing code +TESTING_ENV = False + +# New requirement for django 3.2+ +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + +# Build paths inside the project like this: BASE_DIR.joinpath(...) +BASE_DIR = config.get_base_dir() + +# Load configuration data +CONFIG = config.load_config_data(set_cache=True) + +# Default action is to run the system in Debug mode +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = get_boolean_setting('INVENTREE_DEBUG', 'debug', True) + +# Configure logging settings +log_level = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING') + +logging.basicConfig( + level=log_level, + format="%(asctime)s %(levelname)s %(message)s", +) + +if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']: + log_level = 'WARNING' # pragma: no cover + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + }, + }, + 'root': { + 'handlers': ['console'], + 'level': log_level, + }, + 'filters': { + 'require_not_maintenance_mode_503': { + '()': 'maintenance_mode.logging.RequireNotMaintenanceMode503', + }, + }, +} + +# Get a logger instance for this setup file +logger = logging.getLogger("inventree") + +# Load SECRET_KEY +SECRET_KEY = config.get_secret_key() + +# The filesystem location for served static files +STATIC_ROOT = config.get_static_dir() + +# The filesystem location for uploaded meadia files +MEDIA_ROOT = config.get_media_dir() + +# List of allowed hosts (default = allow all) +ALLOWED_HOSTS = get_setting( + "INVENTREE_ALLOWED_HOSTS", + config_key='allowed_hosts', + default_value=['*'], + typecast=list, +) + +# Cross Origin Resource Sharing (CORS) options + +# Only allow CORS access to API +CORS_URLS_REGEX = r'^/api/.*$' + +# Extract CORS options from configuration file +CORS_ORIGIN_ALLOW_ALL = get_boolean_setting( + "INVENTREE_CORS_ORIGIN_ALLOW_ALL", + config_key='cors.allow_all', + default_value=False, +) + +CORS_ORIGIN_WHITELIST = get_setting( + "INVENTREE_CORS_ORIGIN_WHITELIST", + config_key='cors.whitelist', + default_value=[], + typecast=list, +) + +# Needed for the parts importer, directly impacts the maximum parts that can be uploaded +DATA_UPLOAD_MAX_NUMBER_FIELDS = 10000 + +# Web URL endpoint for served static files +STATIC_URL = '/static/' + +STATICFILES_DIRS = [] + +# Translated Template settings +STATICFILES_I18_PREFIX = 'i18n' 
+STATICFILES_I18_SRC = BASE_DIR.joinpath('templates', 'js', 'translated') +STATICFILES_I18_TRG = BASE_DIR.joinpath('InvenTree', 'static_i18n') +STATICFILES_DIRS.append(STATICFILES_I18_TRG) +STATICFILES_I18_TRG = STATICFILES_I18_TRG.joinpath(STATICFILES_I18_PREFIX) + +STATFILES_I18_PROCESSORS = [ + 'InvenTree.context.status_codes', +] + +# Color Themes Directory +STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes').resolve() + +# Web URL endpoint for served media files +MEDIA_URL = '/media/' + +# Database backup options +# Ref: https://django-dbbackup.readthedocs.io/en/master/configuration.html +DBBACKUP_SEND_EMAIL = False +DBBACKUP_STORAGE = get_setting( + 'INVENTREE_BACKUP_STORAGE', + 'backup_storage', + 'django.core.files.storage.FileSystemStorage' +) + +# Default backup configuration +DBBACKUP_STORAGE_OPTIONS = get_setting('INVENTREE_BACKUP_OPTIONS', 'backup_options', None) +if DBBACKUP_STORAGE_OPTIONS is None: + DBBACKUP_STORAGE_OPTIONS = { + 'location': config.get_backup_dir(), + } + +# Application definition + +INSTALLED_APPS = [ + # Admin site integration + 'django.contrib.admin', + + # InvenTree apps + 'build.apps.BuildConfig', + 'common.apps.CommonConfig', + 'company.apps.CompanyConfig', + 'label.apps.LabelConfig', + 'order.apps.OrderConfig', + 'part.apps.PartConfig', + 'report.apps.ReportConfig', + 'stock.apps.StockConfig', + 'users.apps.UsersConfig', + 'plugin.apps.PluginAppConfig', + 'InvenTree.apps.InvenTreeConfig', # InvenTree app runs last + + # Core django modules + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'user_sessions', # db user sessions + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'django.contrib.sites', + + # Maintenance + 'maintenance_mode', + + # Third part add-ons + 'django_filters', # Extended filter functionality + 'rest_framework', # DRF (Django Rest Framework) + 'rest_framework.authtoken', # Token authentication for API + 'corsheaders', # Cross-origin Resource Sharing for DRF + 
'crispy_forms', # Improved form rendering + 'import_export', # Import / export tables to file + 'django_cleanup.apps.CleanupConfig', # Automatically delete orphaned MEDIA files + 'mptt', # Modified Preorder Tree Traversal + 'markdownify', # Markdown template rendering + 'djmoney', # django-money integration + 'djmoney.contrib.exchange', # django-money exchange rates + 'error_report', # Error reporting in the admin interface + 'django_q', + 'formtools', # Form wizard tools + 'dbbackup', # Backups - django-dbbackup + + 'allauth', # Base app for SSO + 'allauth.account', # Extend user with accounts + 'allauth.socialaccount', # Use 'social' providers + + 'django_otp', # OTP is needed for MFA - base package + 'django_otp.plugins.otp_totp', # Time based OTP + 'django_otp.plugins.otp_static', # Backup codes + + 'allauth_2fa', # MFA flow for allauth + + 'django_ical', # For exporting calendars +] + +MIDDLEWARE = CONFIG.get('middleware', [ + 'django.middleware.security.SecurityMiddleware', + 'x_forwarded_for.middleware.XForwardedForMiddleware', + 'user_sessions.middleware.SessionMiddleware', # db user sessions + 'django.middleware.locale.LocaleMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'corsheaders.middleware.CorsMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'InvenTree.middleware.InvenTreeRemoteUserMiddleware', # Remote / proxy auth + 'django_otp.middleware.OTPMiddleware', # MFA support + 'InvenTree.middleware.CustomAllauthTwoFactorMiddleware', # Flow control for allauth + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'InvenTree.middleware.AuthRequiredMiddleware', + 'InvenTree.middleware.Check2FAMiddleware', # Check if the user should be forced to use MFA + 'maintenance_mode.middleware.MaintenanceModeMiddleware', + 'InvenTree.middleware.InvenTreeExceptionProcessor', # Error reporting +]) + 
+AUTHENTICATION_BACKENDS = CONFIG.get('authentication_backends', [ + 'django.contrib.auth.backends.RemoteUserBackend', # proxy login + 'django.contrib.auth.backends.ModelBackend', + 'allauth.account.auth_backends.AuthenticationBackend', # SSO login via external providers +]) + +DEBUG_TOOLBAR_ENABLED = DEBUG and get_setting('INVENTREE_DEBUG_TOOLBAR', 'debug_toolbar', False) + +# If the debug toolbar is enabled, add the modules +if DEBUG_TOOLBAR_ENABLED: # pragma: no cover + logger.info("Running with DEBUG_TOOLBAR enabled") + INSTALLED_APPS.append('debug_toolbar') + MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware') + + DEBUG_TOOLBAR_CONFIG = { + 'RESULTS_CACHE_SIZE': 100, + 'OBSERVE_REQUEST_CALLBACK': lambda x: False, + } + +# Internal IP addresses allowed to see the debug toolbar +INTERNAL_IPS = [ + '127.0.0.1', +] + +# Internal flag to determine if we are running in docker mode +DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False) + +if DOCKER: # pragma: no cover + # Internal IP addresses are different when running under docker + hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname()) + INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"] + +# Allow secure http developer server in debug mode +if DEBUG: + INSTALLED_APPS.append('sslserver') + +# InvenTree URL configuration + +# Base URL for admin pages (default="admin") +INVENTREE_ADMIN_URL = get_setting( + 'INVENTREE_ADMIN_URL', + config_key='admin_url', + default_value='admin' +) + +ROOT_URLCONF = 'InvenTree.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [ + BASE_DIR.joinpath('templates'), + # Allow templates in the reporting directory to be accessed + MEDIA_ROOT.joinpath('report'), + MEDIA_ROOT.joinpath('label'), + ], + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 
'django.template.context_processors.i18n', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + # Custom InvenTree context processors + 'InvenTree.context.health_status', + 'InvenTree.context.status_codes', + 'InvenTree.context.user_roles', + ], + 'loaders': [( + 'django.template.loaders.cached.Loader', [ + 'plugin.template.PluginTemplateLoader', + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ]) + ], + }, + }, +] + +if DEBUG_TOOLBAR_ENABLED: # pragma: no cover + # Note that the APP_DIRS value must be set when using debug_toolbar + # But this will kill template loading for plugins + TEMPLATES[0]['APP_DIRS'] = True + del TEMPLATES[0]['OPTIONS']['loaders'] + +REST_FRAMEWORK = { + 'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler', + 'DATETIME_FORMAT': '%Y-%m-%d %H:%M', + 'DEFAULT_AUTHENTICATION_CLASSES': ( + 'rest_framework.authentication.BasicAuthentication', + 'rest_framework.authentication.SessionAuthentication', + 'rest_framework.authentication.TokenAuthentication', + ), + 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', + 'DEFAULT_PERMISSION_CLASSES': ( + 'rest_framework.permissions.IsAuthenticated', + 'rest_framework.permissions.DjangoModelPermissions', + 'InvenTree.permissions.RolePermission', + ), + 'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema', + 'DEFAULT_METADATA_CLASS': 'InvenTree.metadata.InvenTreeMetadata', + 'DEFAULT_RENDERER_CLASSES': [ + 'rest_framework.renderers.JSONRenderer', + ] +} + +if DEBUG: + # Enable browsable API if in DEBUG mode + REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'].append('rest_framework.renderers.BrowsableAPIRenderer') + +WSGI_APPLICATION = 'InvenTree.wsgi.application' + +""" +Configure the database backend based on the user-specified values. 
+ +- Primarily this configuration happens in the config.yaml file +- However there may be reason to configure the DB via environmental variables +- The following code lets the user "mix and match" database configuration +""" + +logger.debug("Configuring database backend:") + +# Extract database configuration from the config.yaml file +db_config = CONFIG.get('database', {}) + +if not db_config: + db_config = {} + +# Environment variables take preference over config file! + +db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'] + +for key in db_keys: + # First, check the environment variables + env_key = f"INVENTREE_DB_{key}" + env_var = os.environ.get(env_key, None) + + if env_var: + # Make use PORT is int + if key == 'PORT': + try: + env_var = int(env_var) + except ValueError: + logger.error(f"Invalid number for {env_key}: {env_var}") + # Override configuration value + db_config[key] = env_var + +# Check that required database configuration options are specified +required_keys = ['ENGINE', 'NAME'] + +for key in required_keys: + if key not in db_config: # pragma: no cover + error_msg = f'Missing required database configuration value {key}' + logger.error(error_msg) + + print('Error: ' + error_msg) + sys.exit(-1) + +""" +Special considerations for the database 'ENGINE' setting. 
+It can be specified in config.yaml (or envvar) as either (for example): +- sqlite3 +- django.db.backends.sqlite3 +- django.db.backends.postgresql +""" + +db_engine = db_config['ENGINE'].lower() + +# Correct common misspelling +if db_engine == 'sqlite': + db_engine = 'sqlite3' # pragma: no cover + +if db_engine in ['sqlite3', 'postgresql', 'mysql']: + # Prepend the required python module string + db_engine = f'django.db.backends.{db_engine}' + db_config['ENGINE'] = db_engine + +db_name = db_config['NAME'] +db_host = db_config.get('HOST', "''") + +if 'sqlite' in db_engine: + db_name = str(Path(db_name).resolve()) + db_config['NAME'] = db_name + +logger.info(f"DB_ENGINE: {db_engine}") +logger.info(f"DB_NAME: {db_name}") +logger.info(f"DB_HOST: {db_host}") + +""" +In addition to base-level database configuration, we may wish to specify specific options to the database backend +Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS +""" + +# 'OPTIONS' or 'options' can be specified in config.yaml +# Set useful sensible timeouts for a transactional webserver to communicate +# with its database server, that is, if the webserver is having issues +# connecting to the database server (such as a replica failover) don't sit and +# wait for possibly an hour or more, just tell the client something went wrong +# and let the client retry when they want to. 
+db_options = db_config.get("OPTIONS", db_config.get("options", {})) + +# Specific options for postgres backend +if "postgres" in db_engine: # pragma: no cover + from psycopg2.extensions import (ISOLATION_LEVEL_READ_COMMITTED, + ISOLATION_LEVEL_SERIALIZABLE) + + # Connection timeout + if "connect_timeout" not in db_options: + # The DB server is in the same data center, it should not take very + # long to connect to the database server + # # seconds, 2 is minium allowed by libpq + db_options["connect_timeout"] = int( + get_setting('INVENTREE_DB_TIMEOUT', 'database.timeout', 2) + ) + + # Setup TCP keepalive + # DB server is in the same DC, it should not become unresponsive for + # very long. With the defaults below we wait 5 seconds for the network + # issue to resolve itself. It it that doesn't happen whatever happened + # is probably fatal and no amount of waiting is going to fix it. + # # 0 - TCP Keepalives disabled; 1 - enabled + if "keepalives" not in db_options: + db_options["keepalives"] = int( + get_setting('INVENTREE_DB_TCP_KEEPALIVES', 'database.tcp_keepalives', 1) + ) + + # Seconds after connection is idle to send keep alive + if "keepalives_idle" not in db_options: + db_options["keepalives_idle"] = int( + get_setting('INVENTREE_DB_TCP_KEEPALIVES_IDLE', 'database.tcp_keepalives_idle', 1) + ) + + # Seconds after missing ACK to send another keep alive + if "keepalives_interval" not in db_options: + db_options["keepalives_interval"] = int( + get_setting("INVENTREE_DB_TCP_KEEPALIVES_INTERVAL", "database.tcp_keepalives_internal", "1") + ) + + # Number of missing ACKs before we close the connection + if "keepalives_count" not in db_options: + db_options["keepalives_count"] = int( + get_setting("INVENTREE_DB_TCP_KEEPALIVES_COUNT", "database.tcp_keepalives_count", "5") + ) + + # # Milliseconds for how long pending data should remain unacked + # by the remote server + # TODO: Supported starting in PSQL 11 + # "tcp_user_timeout": int(os.getenv("PGTCP_USER_TIMEOUT", 
"1000"), + + # Postgres's default isolation level is Read Committed which is + # normally fine, but most developers think the database server is + # actually going to do Serializable type checks on the queries to + # protect against simultaneous changes. + # https://www.postgresql.org/docs/devel/transaction-iso.html + # https://docs.djangoproject.com/en/3.2/ref/databases/#isolation-level + if "isolation_level" not in db_options: + serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) + db_options["isolation_level"] = ISOLATION_LEVEL_SERIALIZABLE if serializable else ISOLATION_LEVEL_READ_COMMITTED + +# Specific options for MySql / MariaDB backend +elif "mysql" in db_engine: # pragma: no cover + # TODO TCP time outs and keepalives + + # MariaDB's default isolation level is Repeatable Read which is + # normally fine, but most developers think the database server is + # actually going to Serializable type checks on the queries to + # protect against siumltaneous changes. + # https://mariadb.com/kb/en/mariadb-transactions-and-isolation-levels-for-sql-server-users/#changing-the-isolation-level + # https://docs.djangoproject.com/en/3.2/ref/databases/#mysql-isolation-level + if "isolation_level" not in db_options: + serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) + db_options["isolation_level"] = "serializable" if serializable else "read committed" + +# Specific options for sqlite backend +elif "sqlite" in db_engine: + # TODO: Verify timeouts are not an issue because no network is involved for SQLite + + # SQLite's default isolation level is Serializable due to SQLite's + # single writer implementation. Presumably as a result of this, it is + # not possible to implement any lower isolation levels in SQLite. 
+ # https://www.sqlite.org/isolation.html + pass + +# Provide OPTIONS dict back to the database configuration dict +db_config['OPTIONS'] = db_options + +# Set testing options for the database +db_config['TEST'] = { + 'CHARSET': 'utf8', +} + +# Set collation option for mysql test database +if 'mysql' in db_engine: + db_config['TEST']['COLLATION'] = 'utf8_general_ci' # pragma: no cover + +DATABASES = { + 'default': db_config +} + +# login settings +REMOTE_LOGIN = get_boolean_setting('INVENTREE_REMOTE_LOGIN', 'remote_login_enabled', False) +REMOTE_LOGIN_HEADER = get_setting('INVENTREE_REMOTE_LOGIN_HEADER', 'remote_login_header', 'REMOTE_USER') + +# sentry.io integration for error reporting +SENTRY_ENABLED = get_boolean_setting('INVENTREE_SENTRY_ENABLED', 'sentry_enabled', False) +# Default Sentry DSN (can be overriden if user wants custom sentry integration) +INVENTREE_DSN = 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600' +SENTRY_DSN = get_setting('INVENTREE_SENTRY_DSN', 'sentry_dsn', INVENTREE_DSN) +SENTRY_SAMPLE_RATE = float(get_setting('INVENTREE_SENTRY_SAMPLE_RATE', 'sentry_sample_rate', 0.1)) + +if SENTRY_ENABLED and SENTRY_DSN: # pragma: no cover + sentry_sdk.init( + dsn=SENTRY_DSN, + integrations=[DjangoIntegration(), ], + traces_sample_rate=1.0 if DEBUG else SENTRY_SAMPLE_RATE, + send_default_pii=True + ) + inventree_tags = { + 'testing': TESTING, + 'docker': DOCKER, + 'debug': DEBUG, + 'remote': REMOTE_LOGIN, + } + for key, val in inventree_tags.items(): + sentry_sdk.set_tag(f'inventree_{key}', val) + +# Cache configuration +cache_host = get_setting('INVENTREE_CACHE_HOST', 'cache.host', None) +cache_port = get_setting('INVENTREE_CACHE_PORT', 'cache.port', '6379', typecast=int) + +if cache_host: # pragma: no cover + # We are going to rely upon a possibly non-localhost for our cache, + # so don't wait too long for the cache as nothing in the cache should be + # irreplacable. 
+ _cache_options = { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")), + "SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")), + "CONNECTION_POOL_KWARGS": { + "socket_keepalive": config.is_true( + os.getenv("CACHE_TCP_KEEPALIVE", "1") + ), + "socket_keepalive_options": { + socket.TCP_KEEPCNT: int( + os.getenv("CACHE_KEEPALIVES_COUNT", "5") + ), + socket.TCP_KEEPIDLE: int( + os.getenv("CACHE_KEEPALIVES_IDLE", "1") + ), + socket.TCP_KEEPINTVL: int( + os.getenv("CACHE_KEEPALIVES_INTERVAL", "1") + ), + socket.TCP_USER_TIMEOUT: int( + os.getenv("CACHE_TCP_USER_TIMEOUT", "1000") + ), + }, + }, + } + CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": f"redis://{cache_host}:{cache_port}/0", + "OPTIONS": _cache_options, + }, + } +else: + CACHES = { + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + }, + } + +_q_worker_timeout = int(get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)) + +# django-q background worker configuration +Q_CLUSTER = { + 'name': 'InvenTree', + 'label': 'Background Tasks', + 'workers': int(get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4)), + 'timeout': _q_worker_timeout, + 'retry': min(120, _q_worker_timeout + 30), + 'max_attempts': int(get_setting('INVENTREE_BACKGROUND_MAX_ATTEMPTS', 'background.max_attempts', 5)), + 'queue_limit': 50, + 'catch_up': False, + 'bulk': 10, + 'orm': 'default', + 'cache': 'default', + 'sync': False, +} + +# Configure django-q sentry integration +if SENTRY_ENABLED and SENTRY_DSN: + Q_CLUSTER['error_reporter'] = { + 'sentry': { + 'dsn': SENTRY_DSN + } + } + +if cache_host: # pragma: no cover + # If using external redis cache, make the cache the broker for Django Q + # as well + Q_CLUSTER["django_redis"] = "worker" + +# database user sessions +SESSION_ENGINE = 'user_sessions.backends.db' +LOGOUT_REDIRECT_URL = 
get_setting('INVENTREE_LOGOUT_REDIRECT_URL', 'logout_redirect_url', 'index') +SILENCED_SYSTEM_CHECKS = [ + 'admin.E410', +] + +# Password validation +# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + +# Extra (optional) URL validators +# See https://docs.djangoproject.com/en/2.2/ref/validators/#django.core.validators.URLValidator + +EXTRA_URL_SCHEMES = get_setting('INVENTREE_EXTRA_URL_SCHEMES', 'extra_url_schemes', []) + +if type(EXTRA_URL_SCHEMES) not in [list]: # pragma: no cover + logger.warning("extra_url_schemes not correctly formatted") + EXTRA_URL_SCHEMES = [] + +# Internationalization +# https://docs.djangoproject.com/en/dev/topics/i18n/ +LANGUAGE_CODE = get_setting('INVENTREE_LANGUAGE', 'language', 'en-us') +# Store language settings for 30 days +LANGUAGE_COOKIE_AGE = 2592000 + +# If a new language translation is supported, it must be added here +LANGUAGES = [ + ('cs', _('Czech')), + ('da', _('Danish')), + ('de', _('German')), + ('el', _('Greek')), + ('en', _('English')), + ('es', _('Spanish')), + ('es-mx', _('Spanish (Mexican)')), + ('fa', _('Farsi / Persian')), + ('fr', _('French')), + ('he', _('Hebrew')), + ('hu', _('Hungarian')), + ('it', _('Italian')), + ('ja', _('Japanese')), + ('ko', _('Korean')), + ('nl', _('Dutch')), + ('no', _('Norwegian')), + ('pl', _('Polish')), + ('pt', _('Portuguese')), + ('pt-BR', _('Portuguese (Brazilian)')), + ('ru', _('Russian')), + ('sl', _('Slovenian')), + ('sv', _('Swedish')), + ('th', _('Thai')), + ('tr', _('Turkish')), + ('vi', _('Vietnamese')), + ('zh-hans', _('Chinese')), +] + +# Testing interface 
translations +if get_boolean_setting('TEST_TRANSLATIONS', default_value=False): # pragma: no cover + # Set default language + LANGUAGE_CODE = 'xx' + + # Add to language catalog + LANGUAGES.append(('xx', 'Test')) + + # Add custom languages not provided by Django + EXTRA_LANG_INFO = { + 'xx': { + 'code': 'xx', + 'name': 'Test', + 'name_local': 'Test' + }, + } + LANG_INFO = dict(django.conf.locale.LANG_INFO, **EXTRA_LANG_INFO) + django.conf.locale.LANG_INFO = LANG_INFO + +# Currencies available for use +CURRENCIES = get_setting( + 'INVENTREE_CURRENCIES', 'currencies', + ['AUD', 'CAD', 'CNY', 'EUR', 'GBP', 'JPY', 'NZD', 'USD'], + typecast=list, +) + +# Maximum number of decimal places for currency rendering +CURRENCY_DECIMAL_PLACES = 6 + +# Check that each provided currency is supported +for currency in CURRENCIES: + if currency not in moneyed.CURRENCIES: # pragma: no cover + logger.error(f"Currency code '{currency}' is not supported") + sys.exit(1) + +# Custom currency exchange backend +EXCHANGE_BACKEND = 'InvenTree.exchange.InvenTreeExchange' + +# Email configuration options +EMAIL_BACKEND = get_setting('INVENTREE_EMAIL_BACKEND', 'email.backend', 'django.core.mail.backends.smtp.EmailBackend') +EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host', '') +EMAIL_PORT = get_setting('INVENTREE_EMAIL_PORT', 'email.port', 25, typecast=int) +EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username', '') +EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password', '') +EMAIL_SUBJECT_PREFIX = get_setting('INVENTREE_EMAIL_PREFIX', 'email.prefix', '[InvenTree] ') +EMAIL_USE_TLS = get_boolean_setting('INVENTREE_EMAIL_TLS', 'email.tls', False) +EMAIL_USE_SSL = get_boolean_setting('INVENTREE_EMAIL_SSL', 'email.ssl', False) + +DEFAULT_FROM_EMAIL = get_setting('INVENTREE_EMAIL_SENDER', 'email.sender', '') + +EMAIL_USE_LOCALTIME = False +EMAIL_TIMEOUT = 60 + +LOCALE_PATHS = ( + BASE_DIR.joinpath('locale/'), +) + +TIME_ZONE = 
get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC') + +USE_I18N = True + +USE_L10N = True + +# Do not use native timezone support in "test" mode +# It generates a *lot* of cruft in the logs +if not TESTING: + USE_TZ = True # pragma: no cover + +DATE_INPUT_FORMATS = [ + "%Y-%m-%d", +] + +# crispy forms use the bootstrap templates +CRISPY_TEMPLATE_PACK = 'bootstrap4' + +# Use database transactions when importing / exporting data +IMPORT_EXPORT_USE_TRANSACTIONS = True + +SITE_ID = 1 + +# Load the allauth social backends +SOCIAL_BACKENDS = get_setting('INVENTREE_SOCIAL_BACKENDS', 'social_backends', [], typecast=list) + +for app in SOCIAL_BACKENDS: + INSTALLED_APPS.append(app) # pragma: no cover + +SOCIALACCOUNT_PROVIDERS = get_setting('INVENTREE_SOCIAL_PROVIDERS', 'social_providers', None, typecast=dict) + +SOCIALACCOUNT_STORE_TOKENS = True + +# settings for allauth +ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = get_setting('INVENTREE_LOGIN_CONFIRM_DAYS', 'login_confirm_days', 3, typecast=int) +ACCOUNT_LOGIN_ATTEMPTS_LIMIT = get_setting('INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5, typecast=int) +ACCOUNT_DEFAULT_HTTP_PROTOCOL = get_setting('INVENTREE_LOGIN_DEFAULT_HTTP_PROTOCOL', 'login_default_protocol', 'http') +ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True +ACCOUNT_PREVENT_ENUMERATION = True + +# override forms / adapters +ACCOUNT_FORMS = { + 'login': 'allauth.account.forms.LoginForm', + 'signup': 'InvenTree.forms.CustomSignupForm', + 'add_email': 'allauth.account.forms.AddEmailForm', + 'change_password': 'allauth.account.forms.ChangePasswordForm', + 'set_password': 'allauth.account.forms.SetPasswordForm', + 'reset_password': 'allauth.account.forms.ResetPasswordForm', + 'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm', + 'disconnect': 'allauth.socialaccount.forms.DisconnectForm', +} + +SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter' +ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter' + +# Markdownify configuration +# Ref: 
https://django-markdownify.readthedocs.io/en/latest/settings.html + +MARKDOWNIFY = { + 'default': { + 'BLEACH': True, + 'WHITELIST_ATTRS': [ + 'href', + 'src', + 'alt', + ], + 'MARKDOWN_EXTENSIONS': [ + 'markdown.extensions.extra' + ], + 'WHITELIST_TAGS': [ + 'a', + 'abbr', + 'b', + 'blockquote', + 'em', + 'h1', 'h2', 'h3', + 'i', + 'img', + 'li', + 'ol', + 'p', + 'strong', + 'ul', + 'table', + 'thead', + 'tbody', + 'th', + 'tr', + 'td' + ], + } +} + +# Ignore these error typeps for in-database error logging +IGNORED_ERRORS = [ + Http404, + django.core.exceptions.PermissionDenied, +] + +# Maintenance mode +MAINTENANCE_MODE_RETRY_AFTER = 60 +MAINTENANCE_MODE_STATE_BACKEND = 'maintenance_mode.backends.StaticStorageBackend' + +# Are plugins enabled? +PLUGINS_ENABLED = get_boolean_setting('INVENTREE_PLUGINS_ENABLED', 'plugins_enabled', False) + +PLUGIN_FILE = config.get_plugin_file() + +# Plugin test settings +PLUGIN_TESTING = get_setting('INVENTREE_PLUGIN_TESTING', 'PLUGIN_TESTING', TESTING) # Are plugins beeing tested? +PLUGIN_TESTING_SETUP = get_setting('INVENTREE_PLUGIN_TESTING_SETUP', 'PLUGIN_TESTING_SETUP', False) # Load plugins from setup hooks in testing? +PLUGIN_TESTING_EVENTS = False # Flag if events are tested right now +PLUGIN_RETRY = get_setting('INVENTREE_PLUGIN_RETRY', 'PLUGIN_RETRY', 5) # How often should plugin loading be tried? +PLUGIN_FILE_CHECKED = False # Was the plugin file checked? 
+ +# User interface customization values +CUSTOM_LOGO = get_custom_file('INVENTREE_CUSTOM_LOGO', 'customize.logo', 'custom logo', lookup_media=True) +CUSTOM_SPLASH = get_custom_file('INVENTREE_CUSTOM_SPLASH', 'customize.splash', 'custom splash') + +CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {}) +if DEBUG: + logger.info("InvenTree running with DEBUG enabled") + +logger.info(f"MEDIA_ROOT: '{MEDIA_ROOT}'") +logger.info(f"STATIC_ROOT: '{STATIC_ROOT}'") diff --git a/sample.env b/sample.env new file mode 100644 index 0000000..9fff491 --- /dev/null +++ b/sample.env @@ -0,0 +1,68 @@ +# InvenTree environment variables for a postgresql production setup +COMPOSE_PROJECT_NAME=inventree + +# Location of persistent database data (stored external to the docker containers) +# Note: You *must* un-comment this line, and point it to a path on your local machine + +# e.g. Linux +INVENTREE_EXT_VOLUME=data + +# e.g. Windows (docker desktop) +#INVENTREE_EXT_VOLUME=c:/Users/me/inventree-data + +# Default web port for the InvenTree server +INVENTREE_WEB_PORT=8080 + +# Ensure debug is false for a production setup +INVENTREE_DEBUG=False +INVENTREE_LOG_LEVEL=WARNING + +# Database configuration options +# Note: The example setup is for a PostgreSQL database +INVENTREE_DB_ENGINE=postgresql +INVENTREE_DB_NAME=inventree +INVENTREE_DB_HOST=inventree-db +INVENTREE_DB_PORT=5432 + +# Redis cache setup (disabled by default) +# Un-comment the following lines to enable Redis cache +# Note that you will also have to run docker-compose with the --profile redis command +# Refer to settings.py for other cache options +#INVENTREE_CACHE_HOST=inventree-cache +#INVENTREE_CACHE_PORT=6379 + +# Options for gunicorn server +INVENTREE_GUNICORN_TIMEOUT=30 + +# Enable custom plugins? 
+INVENTREE_PLUGINS_ENABLED=False + +# Image tag that should be used +INVENTREE_IMAGE=inventree/inventree:0.10.1 +REDIS_IMAGE=redis:7.0-alpine +NGINX_IMAGE=nginxinc/nginx-unprivileged:stable-alpine +# Postgres image must match version of pgdump in inventree image +POSTGRES_IMAGE=postgres:13-alpine + +# InvenTree admin account details +# make sure to use secure credentials these lines to auto-create an admin acount +INVENTREE_ADMIN_USER=admin +INVENTREE_ADMIN_PASSWORD=password +INVENTREE_ADMIN_EMAIL=admin@inventree.example + +# Database credentials - These must be configured before running +# Change from the default values! +INVENTREE_DB_USER=inventree +INVENTREE_DB_PASSWORD=password + +# Django configuration +INVENTREE_SECRET_KEY=some-secret-key +ALLOWED_HOSTS=inventree.example.com,www.inventree.example.com + +# SSO Config +INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.keycloak + +HKNG_KEYCLOAK_URL=https://keycloak.example.com +HKNG_KEYCLOAK_REALM=master +HKNG_KEYCLOAK_CLIENT_ID=example-client +HKNG_KEYCLOAK_CLIENT_SECRET=example-secret From 7fe2d91d04437e22d0b26dee41b6ae5212a6d92e Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:13 +0200 Subject: [PATCH 14/24] remove unused variables --- Makefile | 3 --- docker-compose.yml | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 4abc95c..a152205 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,3 @@ -CONFIG_FILE = conf/config.env -SECRET_FILE = conf/secrets.env - COMPOSE_CMD ?= docker compose # use unix:///run/docker.sock for docker socket, unix://${XDG_RUNTIME_DIR}/podman/podman.sock for podman DOCKER_HOST ?= unix:///run/docker.sock diff --git a/docker-compose.yml b/docker-compose.yml index c7957b7..13d476b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -100,9 +100,10 @@ services: volumes: # Data volume must map to /home/inventree/data - inventree_data:/home/inventree/data:z + - ./plugins:/home/inventree/InvenTree/plugins:z # ugly 
backport of 0.11.0 features, to be removed - ./patch/settings.py:/home/inventree/InvenTree/InvenTree/settings.py:ro,Z - - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:zo,Z + - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:ro,Z restart: unless-stopped # Background worker process handles long-running or periodic tasks From 65bf19ca7f54494c38c2a51d9b4e66d35d76c0d0 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:15 +0200 Subject: [PATCH 15/24] cleanup: Remove fixes for pre 0.11.X releases --- docker-compose.yml | 3 - patch/config.py | 347 ----------------- patch/settings.py | 914 --------------------------------------------- sample.env | 2 +- 4 files changed, 1 insertion(+), 1265 deletions(-) delete mode 100644 patch/config.py delete mode 100644 patch/settings.py diff --git a/docker-compose.yml b/docker-compose.yml index 13d476b..6d6dd69 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -101,9 +101,6 @@ services: # Data volume must map to /home/inventree/data - inventree_data:/home/inventree/data:z - ./plugins:/home/inventree/InvenTree/plugins:z - # ugly backport of 0.11.0 features, to be removed - - ./patch/settings.py:/home/inventree/InvenTree/InvenTree/settings.py:ro,Z - - ./patch/config.py:/home/inventree/InvenTree/InvenTree/config.py:ro,Z restart: unless-stopped # Background worker process handles long-running or periodic tasks diff --git a/patch/config.py b/patch/config.py deleted file mode 100644 index 720e7bd..0000000 --- a/patch/config.py +++ /dev/null @@ -1,347 +0,0 @@ -"""Helper functions for loading InvenTree configuration options.""" - -import datetime -import json -import logging -import os -import random -import shutil -import string -from pathlib import Path - -logger = logging.getLogger('inventree') -CONFIG_DATA = None -CONFIG_LOOKUPS = {} - - -def to_list(value, delimiter=','): - """Take a configuration setting and make sure it is a list. 
- - For example, we might have a configuration setting taken from the .config file, - which is already a list. - - However, the same setting may be specified via an environment variable, - using a comma delimited string! - """ - - if type(value) in [list, tuple]: - return value - - # Otherwise, force string value - value = str(value) - - return [x.strip() for x in value.split(delimiter)] - - -def to_dict(value): - """Take a configuration setting and make sure it is a dict. - - For example, we might have a configuration setting taken from the .config file, - which is already an object/dict. - - However, the same setting may be specified via an environment variable, - using a valid JSON string! - """ - if value is None: - return {} - - if type(value) == dict: - return value - - try: - return json.loads(value) - except Exception as error: - logger.error(f"Failed to parse value '{value}' as JSON with error {error}. Ensure value is a valid JSON string.") - return {} - - -def is_true(x): - """Shortcut function to determine if a value "looks" like a boolean""" - return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on'] - - -def get_base_dir() -> Path: - """Returns the base (top-level) InvenTree directory.""" - return Path(__file__).parent.parent.resolve() - - -def ensure_dir(path: Path) -> None: - """Ensure that a directory exists. - - If it does not exist, create it. - """ - - if not path.exists(): - path.mkdir(parents=True, exist_ok=True) - - -def get_config_file(create=True) -> Path: - """Returns the path of the InvenTree configuration file. - - Note: It will be created it if does not already exist! 
- """ - base_dir = get_base_dir() - - cfg_filename = os.getenv('INVENTREE_CONFIG_FILE') - - if cfg_filename: - cfg_filename = Path(cfg_filename.strip()).resolve() - else: - # Config file is *not* specified - use the default - cfg_filename = base_dir.joinpath('config.yaml').resolve() - - if not cfg_filename.exists() and create: - print("InvenTree configuration file 'config.yaml' not found - creating default file") - ensure_dir(cfg_filename.parent) - - cfg_template = base_dir.joinpath("config_template.yaml") - shutil.copyfile(cfg_template, cfg_filename) - print(f"Created config file {cfg_filename}") - - return cfg_filename - - -def load_config_data(set_cache: bool = False) -> map: - """Load configuration data from the config file. - - Arguments: - set_cache(bool): If True, the configuration data will be cached for future use after load. - """ - global CONFIG_DATA - - # use cache if populated - # skip cache if cache should be set - if CONFIG_DATA is not None and not set_cache: - return CONFIG_DATA - - import yaml - - cfg_file = get_config_file() - - with open(cfg_file, 'r') as cfg: - data = yaml.safe_load(cfg) - - # Set the cache if requested - if set_cache: - CONFIG_DATA = data - - return data - - -def get_setting(env_var=None, config_key=None, default_value=None, typecast=None): - """Helper function for retrieving a configuration setting value. - - - First preference is to look for the environment variable - - Second preference is to look for the value of the settings file - - Third preference is the default value - - Arguments: - env_var: Name of the environment variable e.g. 
'INVENTREE_STATIC_ROOT' - config_key: Key to lookup in the configuration file - default_value: Value to return if first two options are not provided - typecast: Function to use for typecasting the value - """ - def try_typecasting(value, source: str): - """Attempt to typecast the value""" - - # Force 'list' of strings - if typecast is list: - value = to_list(value) - - # Valid JSON string is required - elif typecast is dict: - value = to_dict(value) - - elif typecast is not None: - # Try to typecast the value - try: - val = typecast(value) - set_metadata(source) - return val - except Exception as error: - logger.error(f"Failed to typecast '{env_var}' with value '{value}' to type '{typecast}' with error {error}") - - set_metadata(source) - return value - - def set_metadata(source: str): - """Set lookup metadata for the setting.""" - key = env_var or config_key - CONFIG_LOOKUPS[key] = {'env_var': env_var, 'config_key': config_key, 'source': source, 'accessed': datetime.datetime.now()} - - # First, try to load from the environment variables - if env_var is not None: - val = os.getenv(env_var, None) - - if val is not None: - return try_typecasting(val, 'env') - - # Next, try to load from configuration file - if config_key is not None: - cfg_data = load_config_data() - - result = None - - # Hack to allow 'path traversal' in configuration file - for key in config_key.strip().split('.'): - - if type(cfg_data) is not dict or key not in cfg_data: - result = None - break - - result = cfg_data[key] - cfg_data = cfg_data[key] - - if result is not None: - return try_typecasting(result, 'yaml') - - # Finally, return the default value - return try_typecasting(default_value, 'default') - - -def get_boolean_setting(env_var=None, config_key=None, default_value=False): - """Helper function for retreiving a boolean configuration setting""" - - return is_true(get_setting(env_var, config_key, default_value)) - - -def get_media_dir(create=True): - """Return the absolute path for the 
'media' directory (where uploaded files are stored)""" - - md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root') - - if not md: - raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified') - - md = Path(md).resolve() - - if create: - md.mkdir(parents=True, exist_ok=True) - - return md - - -def get_static_dir(create=True): - """Return the absolute path for the 'static' directory (where static files are stored)""" - - sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root') - - if not sd: - raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified') - - sd = Path(sd).resolve() - - if create: - sd.mkdir(parents=True, exist_ok=True) - - return sd - - -def get_backup_dir(create=True): - """Return the absolute path for the backup directory""" - - bd = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir') - - if not bd: - raise FileNotFoundError('INVENTREE_BACKUP_DIR not specified') - - bd = Path(bd).resolve() - - if create: - bd.mkdir(parents=True, exist_ok=True) - - return bd - - -def get_plugin_file(): - """Returns the path of the InvenTree plugins specification file. - - Note: It will be created if it does not already exist! 
- """ - - # Check if the plugin.txt file (specifying required plugins) is specified - plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file') - - if not plugin_file: - # If not specified, look in the same directory as the configuration file - config_dir = get_config_file().parent - plugin_file = config_dir.joinpath('plugins.txt') - else: - # Make sure we are using a modern Path object - plugin_file = Path(plugin_file) - - if not plugin_file.exists(): - logger.warning("Plugin configuration file does not exist - creating default file") - logger.info(f"Creating plugin file at '{plugin_file}'") - ensure_dir(plugin_file.parent) - - # If opening the file fails (no write permission, for example), then this will throw an error - plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n") - - return plugin_file - - -def get_secret_key(): - """Return the secret key value which will be used by django. - - Following options are tested, in descending order of preference: - - A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data - B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file - C) Look for default key file "secret_key.txt" - D) Create "secret_key.txt" if it does not exist - """ - - # Look for environment variable - if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'): - logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover - return secret_key - - # Look for secret key file - if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'): - secret_key_file = Path(secret_key_file).resolve() - else: - # Default location for secret key file - secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve() - - if not secret_key_file.exists(): - logger.info(f"Generating random key file at '{secret_key_file}'") - ensure_dir(secret_key_file.parent) - - # Create a random key file - options = string.digits + string.ascii_letters + 
string.punctuation - key = ''.join([random.choice(options) for i in range(100)]) - secret_key_file.write_text(key) - - logger.info(f"Loading SECRET_KEY from '{secret_key_file}'") - - key_data = secret_key_file.read_text().strip() - - return key_data - - -def get_custom_file(env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False): - """Returns the checked path to a custom file. - - Set lookup_media to True to also search in the media folder. - """ - from django.contrib.staticfiles.storage import StaticFilesStorage - from django.core.files.storage import default_storage - - value = get_setting(env_ref, conf_ref, None) - - if not value: - return None - - static_storage = StaticFilesStorage() - - if static_storage.exists(value): - logger.info(f"Loading {log_ref} from static directory: {value}") - elif lookup_media and default_storage.exists(value): - logger.info(f"Loading {log_ref} from media directory: {value}") - else: - add_dir_str = ' or media' if lookup_media else '' - logger.warning(f"The {log_ref} file '{value}' could not be found in the static{add_dir_str} directories") - value = False - - return value diff --git a/patch/settings.py b/patch/settings.py deleted file mode 100644 index 1aab68d..0000000 --- a/patch/settings.py +++ /dev/null @@ -1,914 +0,0 @@ -"""Django settings for InvenTree project. - -In practice the settings in this file should not be adjusted, -instead settings can be configured in the config.yaml file -located in the top level project directory. - -This allows implementation configuration to be hidden from source control, -as well as separate configuration parameters from the more complex -database setup in this file. 
-""" - -import logging -import os -import socket -import sys -from pathlib import Path - -import django.conf.locale -import django.core.exceptions -from django.http import Http404 -from django.utils.translation import gettext_lazy as _ - -import moneyed -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration - -from . import config -from .config import get_boolean_setting, get_custom_file, get_setting - -INVENTREE_NEWS_URL = 'https://inventree.org/news/feed.atom' - -# Determine if we are running in "test" mode e.g. "manage.py test" -TESTING = 'test' in sys.argv - -if TESTING: - - # Use a weaker password hasher for testing (improves testing speed) - PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher',] - - # Enable slow-test-runner - TEST_RUNNER = 'django_slowtests.testrunner.DiscoverSlowestTestsRunner' - NUM_SLOW_TESTS = 25 - - # Note: The following fix is "required" for docker build workflow - # Note: 2022-12-12 still unsure why... - if os.getenv('INVENTREE_DOCKER'): - # Ensure that sys.path includes global python libs - site_packages = '/usr/local/lib/python3.9/site-packages' - - if site_packages not in sys.path: - print("Adding missing site-packages path:", site_packages) - sys.path.append(site_packages) - -# Are environment variables manipulated by tests? Needs to be set by testing code -TESTING_ENV = False - -# New requirement for django 3.2+ -DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' - -# Build paths inside the project like this: BASE_DIR.joinpath(...) -BASE_DIR = config.get_base_dir() - -# Load configuration data -CONFIG = config.load_config_data(set_cache=True) - -# Default action is to run the system in Debug mode -# SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = get_boolean_setting('INVENTREE_DEBUG', 'debug', True) - -# Configure logging settings -log_level = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING') - -logging.basicConfig( - level=log_level, - format="%(asctime)s %(levelname)s %(message)s", -) - -if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']: - log_level = 'WARNING' # pragma: no cover - -LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - }, - }, - 'root': { - 'handlers': ['console'], - 'level': log_level, - }, - 'filters': { - 'require_not_maintenance_mode_503': { - '()': 'maintenance_mode.logging.RequireNotMaintenanceMode503', - }, - }, -} - -# Get a logger instance for this setup file -logger = logging.getLogger("inventree") - -# Load SECRET_KEY -SECRET_KEY = config.get_secret_key() - -# The filesystem location for served static files -STATIC_ROOT = config.get_static_dir() - -# The filesystem location for uploaded meadia files -MEDIA_ROOT = config.get_media_dir() - -# List of allowed hosts (default = allow all) -ALLOWED_HOSTS = get_setting( - "INVENTREE_ALLOWED_HOSTS", - config_key='allowed_hosts', - default_value=['*'], - typecast=list, -) - -# Cross Origin Resource Sharing (CORS) options - -# Only allow CORS access to API -CORS_URLS_REGEX = r'^/api/.*$' - -# Extract CORS options from configuration file -CORS_ORIGIN_ALLOW_ALL = get_boolean_setting( - "INVENTREE_CORS_ORIGIN_ALLOW_ALL", - config_key='cors.allow_all', - default_value=False, -) - -CORS_ORIGIN_WHITELIST = get_setting( - "INVENTREE_CORS_ORIGIN_WHITELIST", - config_key='cors.whitelist', - default_value=[], - typecast=list, -) - -# Needed for the parts importer, directly impacts the maximum parts that can be uploaded -DATA_UPLOAD_MAX_NUMBER_FIELDS = 10000 - -# Web URL endpoint for served static files -STATIC_URL = '/static/' - -STATICFILES_DIRS = [] - -# Translated Template settings -STATICFILES_I18_PREFIX = 'i18n' 
-STATICFILES_I18_SRC = BASE_DIR.joinpath('templates', 'js', 'translated') -STATICFILES_I18_TRG = BASE_DIR.joinpath('InvenTree', 'static_i18n') -STATICFILES_DIRS.append(STATICFILES_I18_TRG) -STATICFILES_I18_TRG = STATICFILES_I18_TRG.joinpath(STATICFILES_I18_PREFIX) - -STATFILES_I18_PROCESSORS = [ - 'InvenTree.context.status_codes', -] - -# Color Themes Directory -STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes').resolve() - -# Web URL endpoint for served media files -MEDIA_URL = '/media/' - -# Database backup options -# Ref: https://django-dbbackup.readthedocs.io/en/master/configuration.html -DBBACKUP_SEND_EMAIL = False -DBBACKUP_STORAGE = get_setting( - 'INVENTREE_BACKUP_STORAGE', - 'backup_storage', - 'django.core.files.storage.FileSystemStorage' -) - -# Default backup configuration -DBBACKUP_STORAGE_OPTIONS = get_setting('INVENTREE_BACKUP_OPTIONS', 'backup_options', None) -if DBBACKUP_STORAGE_OPTIONS is None: - DBBACKUP_STORAGE_OPTIONS = { - 'location': config.get_backup_dir(), - } - -# Application definition - -INSTALLED_APPS = [ - # Admin site integration - 'django.contrib.admin', - - # InvenTree apps - 'build.apps.BuildConfig', - 'common.apps.CommonConfig', - 'company.apps.CompanyConfig', - 'label.apps.LabelConfig', - 'order.apps.OrderConfig', - 'part.apps.PartConfig', - 'report.apps.ReportConfig', - 'stock.apps.StockConfig', - 'users.apps.UsersConfig', - 'plugin.apps.PluginAppConfig', - 'InvenTree.apps.InvenTreeConfig', # InvenTree app runs last - - # Core django modules - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'user_sessions', # db user sessions - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django.contrib.sites', - - # Maintenance - 'maintenance_mode', - - # Third part add-ons - 'django_filters', # Extended filter functionality - 'rest_framework', # DRF (Django Rest Framework) - 'rest_framework.authtoken', # Token authentication for API - 'corsheaders', # Cross-origin Resource Sharing for DRF - 
'crispy_forms', # Improved form rendering - 'import_export', # Import / export tables to file - 'django_cleanup.apps.CleanupConfig', # Automatically delete orphaned MEDIA files - 'mptt', # Modified Preorder Tree Traversal - 'markdownify', # Markdown template rendering - 'djmoney', # django-money integration - 'djmoney.contrib.exchange', # django-money exchange rates - 'error_report', # Error reporting in the admin interface - 'django_q', - 'formtools', # Form wizard tools - 'dbbackup', # Backups - django-dbbackup - - 'allauth', # Base app for SSO - 'allauth.account', # Extend user with accounts - 'allauth.socialaccount', # Use 'social' providers - - 'django_otp', # OTP is needed for MFA - base package - 'django_otp.plugins.otp_totp', # Time based OTP - 'django_otp.plugins.otp_static', # Backup codes - - 'allauth_2fa', # MFA flow for allauth - - 'django_ical', # For exporting calendars -] - -MIDDLEWARE = CONFIG.get('middleware', [ - 'django.middleware.security.SecurityMiddleware', - 'x_forwarded_for.middleware.XForwardedForMiddleware', - 'user_sessions.middleware.SessionMiddleware', # db user sessions - 'django.middleware.locale.LocaleMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'corsheaders.middleware.CorsMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'InvenTree.middleware.InvenTreeRemoteUserMiddleware', # Remote / proxy auth - 'django_otp.middleware.OTPMiddleware', # MFA support - 'InvenTree.middleware.CustomAllauthTwoFactorMiddleware', # Flow control for allauth - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'InvenTree.middleware.AuthRequiredMiddleware', - 'InvenTree.middleware.Check2FAMiddleware', # Check if the user should be forced to use MFA - 'maintenance_mode.middleware.MaintenanceModeMiddleware', - 'InvenTree.middleware.InvenTreeExceptionProcessor', # Error reporting -]) - 
-AUTHENTICATION_BACKENDS = CONFIG.get('authentication_backends', [ - 'django.contrib.auth.backends.RemoteUserBackend', # proxy login - 'django.contrib.auth.backends.ModelBackend', - 'allauth.account.auth_backends.AuthenticationBackend', # SSO login via external providers -]) - -DEBUG_TOOLBAR_ENABLED = DEBUG and get_setting('INVENTREE_DEBUG_TOOLBAR', 'debug_toolbar', False) - -# If the debug toolbar is enabled, add the modules -if DEBUG_TOOLBAR_ENABLED: # pragma: no cover - logger.info("Running with DEBUG_TOOLBAR enabled") - INSTALLED_APPS.append('debug_toolbar') - MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware') - - DEBUG_TOOLBAR_CONFIG = { - 'RESULTS_CACHE_SIZE': 100, - 'OBSERVE_REQUEST_CALLBACK': lambda x: False, - } - -# Internal IP addresses allowed to see the debug toolbar -INTERNAL_IPS = [ - '127.0.0.1', -] - -# Internal flag to determine if we are running in docker mode -DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False) - -if DOCKER: # pragma: no cover - # Internal IP addresses are different when running under docker - hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname()) - INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"] - -# Allow secure http developer server in debug mode -if DEBUG: - INSTALLED_APPS.append('sslserver') - -# InvenTree URL configuration - -# Base URL for admin pages (default="admin") -INVENTREE_ADMIN_URL = get_setting( - 'INVENTREE_ADMIN_URL', - config_key='admin_url', - default_value='admin' -) - -ROOT_URLCONF = 'InvenTree.urls' - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [ - BASE_DIR.joinpath('templates'), - # Allow templates in the reporting directory to be accessed - MEDIA_ROOT.joinpath('report'), - MEDIA_ROOT.joinpath('label'), - ], - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 
'django.template.context_processors.i18n', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - # Custom InvenTree context processors - 'InvenTree.context.health_status', - 'InvenTree.context.status_codes', - 'InvenTree.context.user_roles', - ], - 'loaders': [( - 'django.template.loaders.cached.Loader', [ - 'plugin.template.PluginTemplateLoader', - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - ]) - ], - }, - }, -] - -if DEBUG_TOOLBAR_ENABLED: # pragma: no cover - # Note that the APP_DIRS value must be set when using debug_toolbar - # But this will kill template loading for plugins - TEMPLATES[0]['APP_DIRS'] = True - del TEMPLATES[0]['OPTIONS']['loaders'] - -REST_FRAMEWORK = { - 'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler', - 'DATETIME_FORMAT': '%Y-%m-%d %H:%M', - 'DEFAULT_AUTHENTICATION_CLASSES': ( - 'rest_framework.authentication.BasicAuthentication', - 'rest_framework.authentication.SessionAuthentication', - 'rest_framework.authentication.TokenAuthentication', - ), - 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', - 'DEFAULT_PERMISSION_CLASSES': ( - 'rest_framework.permissions.IsAuthenticated', - 'rest_framework.permissions.DjangoModelPermissions', - 'InvenTree.permissions.RolePermission', - ), - 'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema', - 'DEFAULT_METADATA_CLASS': 'InvenTree.metadata.InvenTreeMetadata', - 'DEFAULT_RENDERER_CLASSES': [ - 'rest_framework.renderers.JSONRenderer', - ] -} - -if DEBUG: - # Enable browsable API if in DEBUG mode - REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'].append('rest_framework.renderers.BrowsableAPIRenderer') - -WSGI_APPLICATION = 'InvenTree.wsgi.application' - -""" -Configure the database backend based on the user-specified values. 
- -- Primarily this configuration happens in the config.yaml file -- However there may be reason to configure the DB via environmental variables -- The following code lets the user "mix and match" database configuration -""" - -logger.debug("Configuring database backend:") - -# Extract database configuration from the config.yaml file -db_config = CONFIG.get('database', {}) - -if not db_config: - db_config = {} - -# Environment variables take preference over config file! - -db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'] - -for key in db_keys: - # First, check the environment variables - env_key = f"INVENTREE_DB_{key}" - env_var = os.environ.get(env_key, None) - - if env_var: - # Make use PORT is int - if key == 'PORT': - try: - env_var = int(env_var) - except ValueError: - logger.error(f"Invalid number for {env_key}: {env_var}") - # Override configuration value - db_config[key] = env_var - -# Check that required database configuration options are specified -required_keys = ['ENGINE', 'NAME'] - -for key in required_keys: - if key not in db_config: # pragma: no cover - error_msg = f'Missing required database configuration value {key}' - logger.error(error_msg) - - print('Error: ' + error_msg) - sys.exit(-1) - -""" -Special considerations for the database 'ENGINE' setting. 
-It can be specified in config.yaml (or envvar) as either (for example): -- sqlite3 -- django.db.backends.sqlite3 -- django.db.backends.postgresql -""" - -db_engine = db_config['ENGINE'].lower() - -# Correct common misspelling -if db_engine == 'sqlite': - db_engine = 'sqlite3' # pragma: no cover - -if db_engine in ['sqlite3', 'postgresql', 'mysql']: - # Prepend the required python module string - db_engine = f'django.db.backends.{db_engine}' - db_config['ENGINE'] = db_engine - -db_name = db_config['NAME'] -db_host = db_config.get('HOST', "''") - -if 'sqlite' in db_engine: - db_name = str(Path(db_name).resolve()) - db_config['NAME'] = db_name - -logger.info(f"DB_ENGINE: {db_engine}") -logger.info(f"DB_NAME: {db_name}") -logger.info(f"DB_HOST: {db_host}") - -""" -In addition to base-level database configuration, we may wish to specify specific options to the database backend -Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS -""" - -# 'OPTIONS' or 'options' can be specified in config.yaml -# Set useful sensible timeouts for a transactional webserver to communicate -# with its database server, that is, if the webserver is having issues -# connecting to the database server (such as a replica failover) don't sit and -# wait for possibly an hour or more, just tell the client something went wrong -# and let the client retry when they want to. 
-db_options = db_config.get("OPTIONS", db_config.get("options", {})) - -# Specific options for postgres backend -if "postgres" in db_engine: # pragma: no cover - from psycopg2.extensions import (ISOLATION_LEVEL_READ_COMMITTED, - ISOLATION_LEVEL_SERIALIZABLE) - - # Connection timeout - if "connect_timeout" not in db_options: - # The DB server is in the same data center, it should not take very - # long to connect to the database server - # # seconds, 2 is minium allowed by libpq - db_options["connect_timeout"] = int( - get_setting('INVENTREE_DB_TIMEOUT', 'database.timeout', 2) - ) - - # Setup TCP keepalive - # DB server is in the same DC, it should not become unresponsive for - # very long. With the defaults below we wait 5 seconds for the network - # issue to resolve itself. It it that doesn't happen whatever happened - # is probably fatal and no amount of waiting is going to fix it. - # # 0 - TCP Keepalives disabled; 1 - enabled - if "keepalives" not in db_options: - db_options["keepalives"] = int( - get_setting('INVENTREE_DB_TCP_KEEPALIVES', 'database.tcp_keepalives', 1) - ) - - # Seconds after connection is idle to send keep alive - if "keepalives_idle" not in db_options: - db_options["keepalives_idle"] = int( - get_setting('INVENTREE_DB_TCP_KEEPALIVES_IDLE', 'database.tcp_keepalives_idle', 1) - ) - - # Seconds after missing ACK to send another keep alive - if "keepalives_interval" not in db_options: - db_options["keepalives_interval"] = int( - get_setting("INVENTREE_DB_TCP_KEEPALIVES_INTERVAL", "database.tcp_keepalives_internal", "1") - ) - - # Number of missing ACKs before we close the connection - if "keepalives_count" not in db_options: - db_options["keepalives_count"] = int( - get_setting("INVENTREE_DB_TCP_KEEPALIVES_COUNT", "database.tcp_keepalives_count", "5") - ) - - # # Milliseconds for how long pending data should remain unacked - # by the remote server - # TODO: Supported starting in PSQL 11 - # "tcp_user_timeout": int(os.getenv("PGTCP_USER_TIMEOUT", 
"1000"), - - # Postgres's default isolation level is Read Committed which is - # normally fine, but most developers think the database server is - # actually going to do Serializable type checks on the queries to - # protect against simultaneous changes. - # https://www.postgresql.org/docs/devel/transaction-iso.html - # https://docs.djangoproject.com/en/3.2/ref/databases/#isolation-level - if "isolation_level" not in db_options: - serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) - db_options["isolation_level"] = ISOLATION_LEVEL_SERIALIZABLE if serializable else ISOLATION_LEVEL_READ_COMMITTED - -# Specific options for MySql / MariaDB backend -elif "mysql" in db_engine: # pragma: no cover - # TODO TCP time outs and keepalives - - # MariaDB's default isolation level is Repeatable Read which is - # normally fine, but most developers think the database server is - # actually going to Serializable type checks on the queries to - # protect against siumltaneous changes. - # https://mariadb.com/kb/en/mariadb-transactions-and-isolation-levels-for-sql-server-users/#changing-the-isolation-level - # https://docs.djangoproject.com/en/3.2/ref/databases/#mysql-isolation-level - if "isolation_level" not in db_options: - serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False) - db_options["isolation_level"] = "serializable" if serializable else "read committed" - -# Specific options for sqlite backend -elif "sqlite" in db_engine: - # TODO: Verify timeouts are not an issue because no network is involved for SQLite - - # SQLite's default isolation level is Serializable due to SQLite's - # single writer implementation. Presumably as a result of this, it is - # not possible to implement any lower isolation levels in SQLite. 
- # https://www.sqlite.org/isolation.html - pass - -# Provide OPTIONS dict back to the database configuration dict -db_config['OPTIONS'] = db_options - -# Set testing options for the database -db_config['TEST'] = { - 'CHARSET': 'utf8', -} - -# Set collation option for mysql test database -if 'mysql' in db_engine: - db_config['TEST']['COLLATION'] = 'utf8_general_ci' # pragma: no cover - -DATABASES = { - 'default': db_config -} - -# login settings -REMOTE_LOGIN = get_boolean_setting('INVENTREE_REMOTE_LOGIN', 'remote_login_enabled', False) -REMOTE_LOGIN_HEADER = get_setting('INVENTREE_REMOTE_LOGIN_HEADER', 'remote_login_header', 'REMOTE_USER') - -# sentry.io integration for error reporting -SENTRY_ENABLED = get_boolean_setting('INVENTREE_SENTRY_ENABLED', 'sentry_enabled', False) -# Default Sentry DSN (can be overriden if user wants custom sentry integration) -INVENTREE_DSN = 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600' -SENTRY_DSN = get_setting('INVENTREE_SENTRY_DSN', 'sentry_dsn', INVENTREE_DSN) -SENTRY_SAMPLE_RATE = float(get_setting('INVENTREE_SENTRY_SAMPLE_RATE', 'sentry_sample_rate', 0.1)) - -if SENTRY_ENABLED and SENTRY_DSN: # pragma: no cover - sentry_sdk.init( - dsn=SENTRY_DSN, - integrations=[DjangoIntegration(), ], - traces_sample_rate=1.0 if DEBUG else SENTRY_SAMPLE_RATE, - send_default_pii=True - ) - inventree_tags = { - 'testing': TESTING, - 'docker': DOCKER, - 'debug': DEBUG, - 'remote': REMOTE_LOGIN, - } - for key, val in inventree_tags.items(): - sentry_sdk.set_tag(f'inventree_{key}', val) - -# Cache configuration -cache_host = get_setting('INVENTREE_CACHE_HOST', 'cache.host', None) -cache_port = get_setting('INVENTREE_CACHE_PORT', 'cache.port', '6379', typecast=int) - -if cache_host: # pragma: no cover - # We are going to rely upon a possibly non-localhost for our cache, - # so don't wait too long for the cache as nothing in the cache should be - # irreplacable. 
- _cache_options = { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")), - "SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")), - "CONNECTION_POOL_KWARGS": { - "socket_keepalive": config.is_true( - os.getenv("CACHE_TCP_KEEPALIVE", "1") - ), - "socket_keepalive_options": { - socket.TCP_KEEPCNT: int( - os.getenv("CACHE_KEEPALIVES_COUNT", "5") - ), - socket.TCP_KEEPIDLE: int( - os.getenv("CACHE_KEEPALIVES_IDLE", "1") - ), - socket.TCP_KEEPINTVL: int( - os.getenv("CACHE_KEEPALIVES_INTERVAL", "1") - ), - socket.TCP_USER_TIMEOUT: int( - os.getenv("CACHE_TCP_USER_TIMEOUT", "1000") - ), - }, - }, - } - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": f"redis://{cache_host}:{cache_port}/0", - "OPTIONS": _cache_options, - }, - } -else: - CACHES = { - "default": { - "BACKEND": "django.core.cache.backends.locmem.LocMemCache", - }, - } - -_q_worker_timeout = int(get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)) - -# django-q background worker configuration -Q_CLUSTER = { - 'name': 'InvenTree', - 'label': 'Background Tasks', - 'workers': int(get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4)), - 'timeout': _q_worker_timeout, - 'retry': min(120, _q_worker_timeout + 30), - 'max_attempts': int(get_setting('INVENTREE_BACKGROUND_MAX_ATTEMPTS', 'background.max_attempts', 5)), - 'queue_limit': 50, - 'catch_up': False, - 'bulk': 10, - 'orm': 'default', - 'cache': 'default', - 'sync': False, -} - -# Configure django-q sentry integration -if SENTRY_ENABLED and SENTRY_DSN: - Q_CLUSTER['error_reporter'] = { - 'sentry': { - 'dsn': SENTRY_DSN - } - } - -if cache_host: # pragma: no cover - # If using external redis cache, make the cache the broker for Django Q - # as well - Q_CLUSTER["django_redis"] = "worker" - -# database user sessions -SESSION_ENGINE = 'user_sessions.backends.db' -LOGOUT_REDIRECT_URL = 
get_setting('INVENTREE_LOGOUT_REDIRECT_URL', 'logout_redirect_url', 'index') -SILENCED_SYSTEM_CHECKS = [ - 'admin.E410', -] - -# Password validation -# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators - -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', - }, -] - -# Extra (optional) URL validators -# See https://docs.djangoproject.com/en/2.2/ref/validators/#django.core.validators.URLValidator - -EXTRA_URL_SCHEMES = get_setting('INVENTREE_EXTRA_URL_SCHEMES', 'extra_url_schemes', []) - -if type(EXTRA_URL_SCHEMES) not in [list]: # pragma: no cover - logger.warning("extra_url_schemes not correctly formatted") - EXTRA_URL_SCHEMES = [] - -# Internationalization -# https://docs.djangoproject.com/en/dev/topics/i18n/ -LANGUAGE_CODE = get_setting('INVENTREE_LANGUAGE', 'language', 'en-us') -# Store language settings for 30 days -LANGUAGE_COOKIE_AGE = 2592000 - -# If a new language translation is supported, it must be added here -LANGUAGES = [ - ('cs', _('Czech')), - ('da', _('Danish')), - ('de', _('German')), - ('el', _('Greek')), - ('en', _('English')), - ('es', _('Spanish')), - ('es-mx', _('Spanish (Mexican)')), - ('fa', _('Farsi / Persian')), - ('fr', _('French')), - ('he', _('Hebrew')), - ('hu', _('Hungarian')), - ('it', _('Italian')), - ('ja', _('Japanese')), - ('ko', _('Korean')), - ('nl', _('Dutch')), - ('no', _('Norwegian')), - ('pl', _('Polish')), - ('pt', _('Portuguese')), - ('pt-BR', _('Portuguese (Brazilian)')), - ('ru', _('Russian')), - ('sl', _('Slovenian')), - ('sv', _('Swedish')), - ('th', _('Thai')), - ('tr', _('Turkish')), - ('vi', _('Vietnamese')), - ('zh-hans', _('Chinese')), -] - -# Testing interface 
translations -if get_boolean_setting('TEST_TRANSLATIONS', default_value=False): # pragma: no cover - # Set default language - LANGUAGE_CODE = 'xx' - - # Add to language catalog - LANGUAGES.append(('xx', 'Test')) - - # Add custom languages not provided by Django - EXTRA_LANG_INFO = { - 'xx': { - 'code': 'xx', - 'name': 'Test', - 'name_local': 'Test' - }, - } - LANG_INFO = dict(django.conf.locale.LANG_INFO, **EXTRA_LANG_INFO) - django.conf.locale.LANG_INFO = LANG_INFO - -# Currencies available for use -CURRENCIES = get_setting( - 'INVENTREE_CURRENCIES', 'currencies', - ['AUD', 'CAD', 'CNY', 'EUR', 'GBP', 'JPY', 'NZD', 'USD'], - typecast=list, -) - -# Maximum number of decimal places for currency rendering -CURRENCY_DECIMAL_PLACES = 6 - -# Check that each provided currency is supported -for currency in CURRENCIES: - if currency not in moneyed.CURRENCIES: # pragma: no cover - logger.error(f"Currency code '{currency}' is not supported") - sys.exit(1) - -# Custom currency exchange backend -EXCHANGE_BACKEND = 'InvenTree.exchange.InvenTreeExchange' - -# Email configuration options -EMAIL_BACKEND = get_setting('INVENTREE_EMAIL_BACKEND', 'email.backend', 'django.core.mail.backends.smtp.EmailBackend') -EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host', '') -EMAIL_PORT = get_setting('INVENTREE_EMAIL_PORT', 'email.port', 25, typecast=int) -EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username', '') -EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password', '') -EMAIL_SUBJECT_PREFIX = get_setting('INVENTREE_EMAIL_PREFIX', 'email.prefix', '[InvenTree] ') -EMAIL_USE_TLS = get_boolean_setting('INVENTREE_EMAIL_TLS', 'email.tls', False) -EMAIL_USE_SSL = get_boolean_setting('INVENTREE_EMAIL_SSL', 'email.ssl', False) - -DEFAULT_FROM_EMAIL = get_setting('INVENTREE_EMAIL_SENDER', 'email.sender', '') - -EMAIL_USE_LOCALTIME = False -EMAIL_TIMEOUT = 60 - -LOCALE_PATHS = ( - BASE_DIR.joinpath('locale/'), -) - -TIME_ZONE = 
get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC') - -USE_I18N = True - -USE_L10N = True - -# Do not use native timezone support in "test" mode -# It generates a *lot* of cruft in the logs -if not TESTING: - USE_TZ = True # pragma: no cover - -DATE_INPUT_FORMATS = [ - "%Y-%m-%d", -] - -# crispy forms use the bootstrap templates -CRISPY_TEMPLATE_PACK = 'bootstrap4' - -# Use database transactions when importing / exporting data -IMPORT_EXPORT_USE_TRANSACTIONS = True - -SITE_ID = 1 - -# Load the allauth social backends -SOCIAL_BACKENDS = get_setting('INVENTREE_SOCIAL_BACKENDS', 'social_backends', [], typecast=list) - -for app in SOCIAL_BACKENDS: - INSTALLED_APPS.append(app) # pragma: no cover - -SOCIALACCOUNT_PROVIDERS = get_setting('INVENTREE_SOCIAL_PROVIDERS', 'social_providers', None, typecast=dict) - -SOCIALACCOUNT_STORE_TOKENS = True - -# settings for allauth -ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = get_setting('INVENTREE_LOGIN_CONFIRM_DAYS', 'login_confirm_days', 3, typecast=int) -ACCOUNT_LOGIN_ATTEMPTS_LIMIT = get_setting('INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5, typecast=int) -ACCOUNT_DEFAULT_HTTP_PROTOCOL = get_setting('INVENTREE_LOGIN_DEFAULT_HTTP_PROTOCOL', 'login_default_protocol', 'http') -ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True -ACCOUNT_PREVENT_ENUMERATION = True - -# override forms / adapters -ACCOUNT_FORMS = { - 'login': 'allauth.account.forms.LoginForm', - 'signup': 'InvenTree.forms.CustomSignupForm', - 'add_email': 'allauth.account.forms.AddEmailForm', - 'change_password': 'allauth.account.forms.ChangePasswordForm', - 'set_password': 'allauth.account.forms.SetPasswordForm', - 'reset_password': 'allauth.account.forms.ResetPasswordForm', - 'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm', - 'disconnect': 'allauth.socialaccount.forms.DisconnectForm', -} - -SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter' -ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter' - -# Markdownify configuration -# Ref: 
https://django-markdownify.readthedocs.io/en/latest/settings.html - -MARKDOWNIFY = { - 'default': { - 'BLEACH': True, - 'WHITELIST_ATTRS': [ - 'href', - 'src', - 'alt', - ], - 'MARKDOWN_EXTENSIONS': [ - 'markdown.extensions.extra' - ], - 'WHITELIST_TAGS': [ - 'a', - 'abbr', - 'b', - 'blockquote', - 'em', - 'h1', 'h2', 'h3', - 'i', - 'img', - 'li', - 'ol', - 'p', - 'strong', - 'ul', - 'table', - 'thead', - 'tbody', - 'th', - 'tr', - 'td' - ], - } -} - -# Ignore these error typeps for in-database error logging -IGNORED_ERRORS = [ - Http404, - django.core.exceptions.PermissionDenied, -] - -# Maintenance mode -MAINTENANCE_MODE_RETRY_AFTER = 60 -MAINTENANCE_MODE_STATE_BACKEND = 'maintenance_mode.backends.StaticStorageBackend' - -# Are plugins enabled? -PLUGINS_ENABLED = get_boolean_setting('INVENTREE_PLUGINS_ENABLED', 'plugins_enabled', False) - -PLUGIN_FILE = config.get_plugin_file() - -# Plugin test settings -PLUGIN_TESTING = get_setting('INVENTREE_PLUGIN_TESTING', 'PLUGIN_TESTING', TESTING) # Are plugins beeing tested? -PLUGIN_TESTING_SETUP = get_setting('INVENTREE_PLUGIN_TESTING_SETUP', 'PLUGIN_TESTING_SETUP', False) # Load plugins from setup hooks in testing? -PLUGIN_TESTING_EVENTS = False # Flag if events are tested right now -PLUGIN_RETRY = get_setting('INVENTREE_PLUGIN_RETRY', 'PLUGIN_RETRY', 5) # How often should plugin loading be tried? -PLUGIN_FILE_CHECKED = False # Was the plugin file checked? 
- -# User interface customization values -CUSTOM_LOGO = get_custom_file('INVENTREE_CUSTOM_LOGO', 'customize.logo', 'custom logo', lookup_media=True) -CUSTOM_SPLASH = get_custom_file('INVENTREE_CUSTOM_SPLASH', 'customize.splash', 'custom splash') - -CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {}) -if DEBUG: - logger.info("InvenTree running with DEBUG enabled") - -logger.info(f"MEDIA_ROOT: '{MEDIA_ROOT}'") -logger.info(f"STATIC_ROOT: '{STATIC_ROOT}'") diff --git a/sample.env b/sample.env index 9fff491..f1bae5d 100644 --- a/sample.env +++ b/sample.env @@ -38,7 +38,7 @@ INVENTREE_GUNICORN_TIMEOUT=30 INVENTREE_PLUGINS_ENABLED=False # Image tag that should be used -INVENTREE_IMAGE=inventree/inventree:0.10.1 +INVENTREE_IMAGE=inventree/inventree:0.11.3 REDIS_IMAGE=redis:7.0-alpine NGINX_IMAGE=nginxinc/nginx-unprivileged:stable-alpine # Postgres image must match version of pgdump in inventree image From 79d0dcf47feefe875c34e6805a669131697387c5 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:16 +0200 Subject: [PATCH 16/24] add license to project --- LICENSE | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..2071b23 --- /dev/null +++ b/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. From e21a96dcb5e0945aa705d6b0fa383b25ac1fe9d2 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:17 +0200 Subject: [PATCH 17/24] update documentation, rename compose file --- README.md | 26 ++++++++++++++++++++++++-- docker-compose.yml => compose.yml | 0 2 files changed, 24 insertions(+), 2 deletions(-) rename docker-compose.yml => compose.yml (100%) diff --git a/README.md b/README.md index 598fec6..a9f3ffd 100644 --- a/README.md +++ b/README.md @@ -8,15 +8,37 @@ These are the deployment files required to get InvenTree up and running. InvenTr ## Configuration -Copy the `sample.env` into a file named `.env` and make sure to adapt all values to your needs, especially secrets. +Copy the `sample.env` into a file named `.env` and make sure to adapt all values to your needs, especially secrets. Note that the redis cache is disabled by default. ## Installation -In order to run invoke an update or complete the first setup, `make update` is used to pull the latest images and apply all database migrations. +In order to run invoke an update or complete the first setup, the data folder must be created (`mkdir data`) and `make update` is used to pull the latest images and apply all database migrations. The command `make up` can be used to run the setup as a foreground service, `make "up -d"` can be used to run the setup in detached mode. +> Warning: + +When running inside LXC (e.g. 
Proxmox) with ZFS you might need to run add the following to your `/etc/docker/daemon.json` (and restart afterwards): + +```json +{ + "storage-driver": "vfs" +} +``` + +This can be required because Docker does not like ZFS and might have issues to store some layers. Note that using VFS has significant impact on the storage usage (might explode). + ### SSO +The following is an example on configuring SSO using OIDC and Keycloak as IdP. See the [InvenTree SSO docs](https://docs.inventree.org/en/latest/settings/SSO) for more details. + +#### Keycloak + +1. Create a new client (Type: OpenID Connect) +2. Enable Client authentication, Authorization, Standard flow. Disable everything else (explicitly Direct access grant) +3. Set home URL to `https://` and Redirect URL to `https:///accounts/keycloak/login/callback/`. + +#### Inventree + Login as InvenTree admin user. Under `Settings > Login Settings` make sure to `Enable SSO`. diff --git a/docker-compose.yml b/compose.yml similarity index 100% rename from docker-compose.yml rename to compose.yml From 6ec913e595e30a8882667a0528c186adfebd8b16 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:17 +0200 Subject: [PATCH 18/24] migrate from keycloak to oidc provider Django AllAuth dropped keycloak in recent release: https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/keycloak.html --- README.md | 9 ++++++--- compose.yml | 19 +++++++++---------- sample.env | 9 ++++----- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index a9f3ffd..7b98dfd 100644 --- a/README.md +++ b/README.md @@ -30,15 +30,18 @@ This can be required because Docker does not like ZFS and might have issues to s ### SSO -The following is an example on configuring SSO using OIDC and Keycloak as IdP. See the [InvenTree SSO docs](https://docs.inventree.org/en/latest/settings/SSO) for more details. +The following is an example on configuring SSO using OIDC and Keycloak as IdP. 
See the [InvenTree SSO docs](https://docs.inventree.org/en/latest/settings/SSO) as well as the [AllAuth social providers](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/index.html) for more details. Note that Keycloak is not a valid provider anymore any OIDC [should be used](https://django-allauth.readthedocs.io/en/latest/socialaccount/providers/keycloak.html). #### Keycloak 1. Create a new client (Type: OpenID Connect) 2. Enable Client authentication, Authorization, Standard flow. Disable everything else (explicitly Direct access grant) -3. Set home URL to `https://` and Redirect URL to `https:///accounts/keycloak/login/callback/`. +3. Set home URL to `https://` and Redirect URL to `https:///accounts/oidc/login/callback/`. #### Inventree -Login as InvenTree admin user. Under `Settings > Login Settings` make sure to `Enable SSO`. +Login as InvenTree admin user and open the admin interface. +1. In the Admin interface, add a new group named "default" and click on "Save and edit". Disallow reading the admin settings and allow all other permissions you want a default user to have. +2. In the `Social Accounts` block, add a new Social Application. The provider must be oidc, the name and client id can be arbitrary value. The available default site must be added. +3. Go back to the application. Under `Settings > Login Settings` make sure to `Enable SSO` and `Enable SSO registration`. Also, set `Group on signup` to your default group. 
diff --git a/compose.yml b/compose.yml index 6d6dd69..dc7908c 100644 --- a/compose.yml +++ b/compose.yml @@ -82,17 +82,16 @@ services: environment: INVENTREE_SOCIAL_PROVIDERS: | { - "keycloak": { - "SERVERS": [ - { - "KEYCLOAK_URL": "${HKNG_KEYCLOAK_URL:?You must provide the 'HKNG_KEYCLOAK_URL' variable in the .env file}", - "KEYCLOAK_REALM": "${HKNG_KEYCLOAK_REALM:?You must provide the 'HKNG_KEYCLOAK_REALM' variable in the .env file}", - "APP": { - "client_id": "${HKNG_KEYCLOAK_CLIENT_ID:?You must provide the 'HKNG_KEYCLOAK_CLIENT_ID' variable in the .env file}", - "secret": "${HKNG_KEYCLOAK_CLIENT_SECRET:?You must provide the 'HKNG_KEYCLOAK_CLIENT_SECRET' variable in the .env file}" - } + "openid_connect": { + "SERVERS": [{ + "id": "oidc", + "name": "Hacknang SSO", + "server_url": "${HKNG_OIDC_URL:?You must provide the 'HKNG_OIDC_URL' variable in the .env file}", + "APP": { + "client_id": "${HKNG_OIDC_CLIENT_ID:?You must provide the 'HKNG_OIDC_CLIENT_ID' variable in the .env file}", + "secret": "${HKNG_OIDC_CLIENT_SECRET:?You must provide the 'HKNG_OIDC_CLIENT_SECRET' variable in the .env file}" } - ] + }] } } depends_on: diff --git a/sample.env b/sample.env index f1bae5d..b22915b 100644 --- a/sample.env +++ b/sample.env @@ -60,9 +60,8 @@ INVENTREE_SECRET_KEY=some-secret-key ALLOWED_HOSTS=inventree.example.com,www.inventree.example.com # SSO Config -INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.keycloak +INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.openid_connect -HKNG_KEYCLOAK_URL=https://keycloak.example.com -HKNG_KEYCLOAK_REALM=master -HKNG_KEYCLOAK_CLIENT_ID=example-client -HKNG_KEYCLOAK_CLIENT_SECRET=example-secret +HKNG_OIDC_URL=https://keycloak.example.com/realms/master/.well-known/openid-configuration +HKNG_OIDC_CLIENT_ID=example-client +HKNG_OIDC_SECRET=example-secret From 84887eb811b3836ab8f6fd7f1bd638293bc8ac1d Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:18 +0200 Subject: [PATCH 19/24] remove crappy 
volume stuff --- compose.yml | 18 ++++-------------- sample.env | 2 +- 2 files changed, 5 insertions(+), 15 deletions(-) diff --git a/compose.yml b/compose.yml index dc7908c..b5c9c21 100644 --- a/compose.yml +++ b/compose.yml @@ -51,7 +51,7 @@ services: - POSTGRES_DB=${INVENTREE_DB_NAME:?You must provide the 'INVENTREE_DB_NAME' variable in the .env file} volumes: # Map 'data' volume such that postgres database is stored externally - - inventree_data:/var/lib/postgresql/data/:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/var/lib/postgresql/data/:z restart: unless-stopped # redis acts as database cache manager @@ -98,7 +98,7 @@ services: - inventree-db volumes: # Data volume must map to /home/inventree/data - - inventree_data:/home/inventree/data:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z - ./plugins:/home/inventree/InvenTree/plugins:z restart: unless-stopped @@ -114,7 +114,7 @@ services: - .env volumes: # Data volume must map to /home/inventree/data - - inventree_data:/home/inventree/data:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z restart: unless-stopped # nginx acts as a reverse proxy @@ -135,15 +135,5 @@ services: # Refer to the provided example file as a starting point - ./nginx.prod.conf:/etc/nginx/conf.d/default.conf:ro,Z # nginx proxy needs access to static and media files - - inventree_data:/var/www:z + - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/var/www:z restart: unless-stopped - -volumes: - # Persistent data, stored external to the container(s) - inventree_data: - driver: local - driver_opts: - type: none - o: bind - # This directory specified where InvenTree data are stored "outside" the docker containers - device: ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' 
variable in the .env file!} diff --git a/sample.env b/sample.env index b22915b..1904bc9 100644 --- a/sample.env +++ b/sample.env @@ -5,7 +5,7 @@ COMPOSE_PROJECT_NAME=inventree # Note: You *must* un-comment this line, and point it to a path on your local machine # e.g. Linux -INVENTREE_EXT_VOLUME=data +INVENTREE_EXT_VOLUME=./data # e.g. Windows (docker desktop) #INVENTREE_EXT_VOLUME=c:/Users/me/inventree-data From 41be255144cc288e6be0ac17c054b75c7a630b61 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:19 +0200 Subject: [PATCH 20/24] add new INVENTREE_SITE_URL variable --- sample.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sample.env b/sample.env index 1904bc9..40df1e0 100644 --- a/sample.env +++ b/sample.env @@ -57,7 +57,7 @@ INVENTREE_DB_PASSWORD=password # Django configuration INVENTREE_SECRET_KEY=some-secret-key -ALLOWED_HOSTS=inventree.example.com,www.inventree.example.com +INVENTREE_SITE_URL=inventree.example.com,www.inventree.example.com # SSO Config INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.openid_connect From c863e5fe21a245f6e5c11325f9cfddb19c4fd977 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:20 +0200 Subject: [PATCH 21/24] cleanup deployment --- Makefile | 4 ++-- backup.sh | 6 ++++++ compose.yml | 15 --------------- 3 files changed, 8 insertions(+), 17 deletions(-) create mode 100644 backup.sh diff --git a/Makefile b/Makefile index a152205..11ba6f3 100644 --- a/Makefile +++ b/Makefile @@ -8,9 +8,9 @@ psql: update: read -p "Update will cause downtime of the server. Are you sure you want to continue? Press Ctrl+c to abort!" 
_ - $(COMPOSE) down $(COMPOSE) pull - $(COMPOSE) run inventree-server invoke update + $(COMPOSE) down + $(COMPOSE) run --rm inventree-server invoke update $(COMPOSE) up -d data: # podman does not autocreate data folder diff --git a/backup.sh b/backup.sh new file mode 100644 index 0000000..81ad1e5 --- /dev/null +++ b/backup.sh @@ -0,0 +1,6 @@ +#!/bin/sh +time=$(date +"%Y-%m-%dT%H:%M:%S%z") +dir=backup +[ -d "${dir}" ] || mkdir -p "${dir}" +docker compose run --rm -u postgres inventree-db sh -c 'PGPASSWORD=$POSTGRES_PASSWORD pg_dump -h inventree-db -p 5432 -U $POSTGRES_USER inventree' > "${dir}/${time}.sql" +# to restore: pg_restore -d newdb db.dump diff --git a/compose.yml b/compose.yml index b5c9c21..efafa65 100644 --- a/compose.yml +++ b/compose.yml @@ -79,21 +79,6 @@ services: - 8000 env_file: - .env - environment: - INVENTREE_SOCIAL_PROVIDERS: | - { - "openid_connect": { - "SERVERS": [{ - "id": "oidc", - "name": "Hacknang SSO", - "server_url": "${HKNG_OIDC_URL:?You must provide the 'HKNG_OIDC_URL' variable in the .env file}", - "APP": { - "client_id": "${HKNG_OIDC_CLIENT_ID:?You must provide the 'HKNG_OIDC_CLIENT_ID' variable in the .env file}", - "secret": "${HKNG_OIDC_CLIENT_SECRET:?You must provide the 'HKNG_OIDC_CLIENT_SECRET' variable in the .env file}" - } - }] - } - } depends_on: - inventree-db volumes: From a9094ff0817d5f3c5a1abc59257f6e454264cc5a Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:21 +0200 Subject: [PATCH 22/24] adhere to upstream compose file for v1.0.0 --- Caddyfile | 77 ++++++++++++++++++++++++++++++++++++++++++ compose.yml | 72 +++++++++++++++++++-------------------- nginx.prod.conf | 64 ----------------------------------- sample.env | 90 +++++++++++++++++++++---------------------------- 4 files changed, 151 insertions(+), 152 deletions(-) create mode 100644 Caddyfile delete mode 100644 nginx.prod.conf diff --git a/Caddyfile b/Caddyfile new file mode 100644 index 0000000..23268c6 --- /dev/null +++ b/Caddyfile @@ -0,0 
+1,77 @@ +# Example Caddyfile for InvenTree +# The following environment variables may be used: +# - INVENTREE_SITE_URL: The upstream URL of the InvenTree site (default: inventree.localhost) +# - INVENTREE_SERVER: The internal URL of the InvenTree container (default: http://inventree-server:8000) +# +# Note that while this file is a good starting point, it may need to be modified to suit your specific requirements +# +# Ref to the Caddyfile documentation: https://caddyserver.com/docs/caddyfile + + +# Logging configuration for Caddy +(log_common) { + log { + output file /var/log/caddy/{args[0]}.access.log + } +} + +# CORS headers control (used for static and media files) +(cors-headers) { + header Allow GET,HEAD,OPTIONS + header Access-Control-Allow-Origin * + header Access-Control-Allow-Methods GET,HEAD,OPTIONS + header Access-Control-Allow-Headers Authorization,Content-Type,User-Agent + + @cors_preflight{args[0]} method OPTIONS + + handle @cors_preflight{args[0]} { + respond "" 204 + } +} + +# The default server address is configured in the .env file +# If not specified, the default address is used - http://inventree.localhost +# If you need to listen on multiple addresses, or use a different port, you can modify this section directly +http://inventree.ctbk.de { + import log_common inventree + + encode gzip + + request_body { + max_size 100MB + } + + # Handle static request files + handle_path /static/* { + import cors-headers static + + root * /var/www/static + file_server + } + + # Handle media request files + handle_path /media/* { + import cors-headers media + + root * /var/www/media + file_server + + # Force download of media files (for security) + # Comment out this line if you do not want to force download + header Content-Disposition attachment + + # Authentication is handled by the forward_auth directive + # This is required to ensure that media files are only accessible to authenticated users + forward_auth 
{$INVENTREE_SERVER:"http://inventree-server:8000"} { + uri /auth/ + } + } + + # All other requests are proxied to the InvenTree server + reverse_proxy {$INVENTREE_SERVER:"http://inventree-server:8000"} { + + # If you are running behind another proxy, you may need to specify 'trusted_proxies' + # Ref: https://caddyserver.com/docs/json/apps/http/servers/trusted_proxies/ + # trusted_proxies ... + } +} diff --git a/compose.yml b/compose.yml index efafa65..f5af5c6 100644 --- a/compose.yml +++ b/compose.yml @@ -1,10 +1,8 @@ -version: "3.8" - # Docker compose recipe for a production-ready InvenTree setup, with the following containers: # - PostgreSQL as the database backend # - gunicorn as the InvenTree web server # - django-q as the InvenTree background worker process -# - nginx as a reverse proxy +# - Caddy as a reverse proxy # - redis as the cache manager (optional, disabled by default) # --------------------- @@ -34,16 +32,20 @@ version: "3.8" # INVENTREE_TAG=0.7.5 # +# ---------------------------- +# Docker compose customization +# ---------------------------- +# If you wish to customize the docker-compose script, you should only do so if you understand the stack! +# Do not expect support for customizations that are not part of the standard InvenTree setup! 
+ services: # Database service # Use PostgreSQL as the database backend inventree-db: + image: postgres:13 container_name: inventree-db - image: ${POSTGRES_IMAGE:?You must provide the 'POSTGRES_IMAGE' variable in the .env file} expose: - ${INVENTREE_DB_PORT:-5432}/tcp - env_file: - - .env environment: - PGDATA=/var/lib/postgresql/data/pgdb - POSTGRES_USER=${INVENTREE_DB_USER:?You must provide the 'INVENTREE_DB_USER' variable in the .env file} @@ -55,43 +57,39 @@ services: restart: unless-stopped # redis acts as database cache manager - # only runs under the "redis" profile : https://docs.docker.com/compose/profiles/ inventree-cache: + image: redis:7-alpine container_name: inventree-cache - image: ${REDIS_IMAGE:?You must provide the 'REDIS_IMAGE' variable in the .env file} - depends_on: - - inventree-db env_file: - .env - profiles: - - redis expose: - - ${INVENTREE_CACHE_PORT:-6379} + - ${INVENTREE_CACHE_PORT:-6379} restart: always # InvenTree web server service # Uses gunicorn as the web server inventree-server: - container_name: inventree-server # If you wish to specify a particular InvenTree version, do so here - image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file} + image: inventree/inventree:${INVENTREE_TAG:-stable} + container_name: inventree-server + # Only change this port if you understand the stack. 
expose: - - 8000 - env_file: - - .env + - 8000 depends_on: - inventree-db + - inventree-cache + env_file: + - .env volumes: # Data volume must map to /home/inventree/data - - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z - - ./plugins:/home/inventree/InvenTree/plugins:z + - ${INVENTREE_EXT_VOLUME}:/home/inventree/data:z restart: unless-stopped # Background worker process handles long-running or periodic tasks inventree-worker: - container_name: inventree-worker # If you wish to specify a particular InvenTree version, do so here - image: ${INVENTREE_IMAGE:?You must provide the 'INVENTREE_IMAGE' variable in the .env file} + image: inventree/inventree:${INVENTREE_TAG:-stable} + container_name: inventree-worker command: invoke worker depends_on: - inventree-server @@ -99,26 +97,26 @@ services: - .env volumes: # Data volume must map to /home/inventree/data - - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/home/inventree/data:z + - ${INVENTREE_EXT_VOLUME}:/home/inventree/data:z restart: unless-stopped - # nginx acts as a reverse proxy - # static files are served directly by nginx - # media files are served by nginx, although authentication is redirected to inventree-server - # web requests are redirected to gunicorn - # NOTE: You will need to provide a working nginx.conf file! 
+ # caddy acts as reverse proxy and static file server + # https://hub.docker.com/_/caddy inventree-proxy: container_name: inventree-proxy - image: ${NGINX_IMAGE:?You must provide the 'NGINX_IMAGE' variable in the .env file} + image: caddy:alpine + restart: always depends_on: - inventree-server ports: - # Default web port is 1337 (can be changed in the .env file) - - ${INVENTREE_WEB_PORT:-1337}:8080 + - ${INVENTREE_WEB_PORT:-80}:80 + - 443:443 + env_file: + - .env volumes: - # Provide nginx configuration file to the container - # Refer to the provided example file as a starting point - - ./nginx.prod.conf:/etc/nginx/conf.d/default.conf:ro,Z - # nginx proxy needs access to static and media files - - ${INVENTREE_EXT_VOLUME:?You must specify the 'INVENTREE_EXT_VOLUME' variable in the .env file!}:/var/www:z - restart: unless-stopped + - ./Caddyfile:/etc/caddy/Caddyfile:ro,z + - ${INVENTREE_EXT_VOLUME}/static:/var/www/static:z + - ${INVENTREE_EXT_VOLUME}/media:/var/www/media:z + - ${INVENTREE_EXT_VOLUME}:/var/log:z + - ${INVENTREE_EXT_VOLUME}:/data:z + - ${INVENTREE_EXT_VOLUME}:/config:z diff --git a/nginx.prod.conf b/nginx.prod.conf deleted file mode 100644 index 1ebdcd2..0000000 --- a/nginx.prod.conf +++ /dev/null @@ -1,64 +0,0 @@ -server { - - # Listen for connection on (internal) port 8080 (unprivileged nginx) - listen 8080; - - real_ip_header proxy_protocol; - - location / { - - proxy_set_header Host $http_host; - proxy_set_header X-Forwarded-By $server_addr:$server_port; - proxy_set_header X-Forwarded-For $remote_addr; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header CLIENT_IP $remote_addr; - - proxy_pass_request_headers on; - - proxy_redirect off; - - client_max_body_size 100M; - - proxy_buffering off; - proxy_request_buffering off; - - # Change 'inventree-server' to the name of the inventree server container, - # and '8000' to the INVENTREE_WEB_PORT (if not default) - proxy_pass 
http://inventree-server:8000; - } - - # Redirect any requests for static files - location /static/ { - alias /var/www/static/; - autoindex on; - - # Caching settings - expires 30d; - add_header Pragma public; - add_header Cache-Control "public"; - } - - # Redirect any requests for media files - location /media/ { - alias /var/www/media/; - - # Media files require user authentication - auth_request /auth; - - # Content header to force download - add_header Content-disposition "attachment"; - } - - # Use the 'user' API endpoint for auth - location /auth { - internal; - - proxy_pass http://inventree-server:8000/auth/; - - proxy_pass_request_body off; - proxy_set_header Content-Length ""; - proxy_set_header X-Original-URI $request_uri; - } - -} diff --git a/sample.env b/sample.env index 40df1e0..859f58e 100644 --- a/sample.env +++ b/sample.env @@ -1,67 +1,55 @@ -# InvenTree environment variables for a postgresql production setup +# InvenTree environment variables for docker compose deployment +# For a full list of the available configuration options, refer to the InvenTree documentation: +# https://docs.inventree.org/en/stable/start/config/ + +# Specify the name of the docker-compose project COMPOSE_PROJECT_NAME=inventree -# Location of persistent database data (stored external to the docker containers) -# Note: You *must* un-comment this line, and point it to a path on your local machine +# InvenTree version tag (e.g. 'stable' / 'latest' / 'x.x.x') +INVENTREE_TAG=stable -# e.g. Linux -INVENTREE_EXT_VOLUME=./data +# InvenTree server URL - update this to match your server URL +INVENTREE_SITE_URL="http://inventree.localhost" +#INVENTREE_SITE_URL="http://192.168.1.2" # You can specify a local IP address here +#INVENTREE_SITE_URL="https://inventree.my-domain.com" # Or a public domain name (which you control) -# e.g. 
Windows (docker desktop) -#INVENTREE_EXT_VOLUME=c:/Users/me/inventree-data - -# Default web port for the InvenTree server -INVENTREE_WEB_PORT=8080 +# Specify the location of the external data volume +# By default, placed in local directory 'inventree-data' +INVENTREE_EXT_VOLUME=./inventree-data # Ensure debug is false for a production setup -INVENTREE_DEBUG=False INVENTREE_LOG_LEVEL=WARNING +# Enable custom plugins? +INVENTREE_PLUGINS_ENABLED=True + +# Run migrations automatically? +INVENTREE_AUTO_UPDATE=True + +# InvenTree superuser account details +# Un-comment (and complete) these lines to auto-create an admin account +#INVENTREE_ADMIN_USER= +#INVENTREE_ADMIN_PASSWORD= +#INVENTREE_ADMIN_EMAIL= + # Database configuration options -# Note: The example setup is for a PostgreSQL database +# DO NOT CHANGE THESE SETTINGS (unless you really know what you are doing) INVENTREE_DB_ENGINE=postgresql INVENTREE_DB_NAME=inventree INVENTREE_DB_HOST=inventree-db INVENTREE_DB_PORT=5432 -# Redis cache setup (disabled by default) -# Un-comment the following lines to enable Redis cache -# Note that you will also have to run docker-compose with the --profile redis command -# Refer to settings.py for other cache options -#INVENTREE_CACHE_HOST=inventree-cache -#INVENTREE_CACHE_PORT=6379 +# Database credentials - These should be changed from the default values! +# Note: These are *NOT* the InvenTree server login credentials, +# they are the credentials for the PostgreSQL database +INVENTREE_DB_USER=pguser +INVENTREE_DB_PASSWORD=pgpassword + +# Redis cache setup +# Refer to the documentation for other cache options +INVENTREE_CACHE_ENABLED=True +INVENTREE_CACHE_HOST=inventree-cache +INVENTREE_CACHE_PORT=6379 # Options for gunicorn server -INVENTREE_GUNICORN_TIMEOUT=30 - -# Enable custom plugins? 
-INVENTREE_PLUGINS_ENABLED=False - -# Image tag that should be used -INVENTREE_IMAGE=inventree/inventree:0.11.3 -REDIS_IMAGE=redis:7.0-alpine -NGINX_IMAGE=nginxinc/nginx-unprivileged:stable-alpine -# Postgres image must match version of pgdump in inventree image -POSTGRES_IMAGE=postgres:13-alpine - -# InvenTree admin account details -# make sure to use secure credentials these lines to auto-create an admin acount -INVENTREE_ADMIN_USER=admin -INVENTREE_ADMIN_PASSWORD=password -INVENTREE_ADMIN_EMAIL=admin@inventree.example - -# Database credentials - These must be configured before running -# Change from the default values! -INVENTREE_DB_USER=inventree -INVENTREE_DB_PASSWORD=password - -# Django configuration -INVENTREE_SECRET_KEY=some-secret-key -INVENTREE_SITE_URL=inventree.example.com,www.inventree.example.com - -# SSO Config -INVENTREE_SOCIAL_BACKENDS=allauth.socialaccount.providers.openid_connect - -HKNG_OIDC_URL=https://keycloak.example.com/realms/master/.well-known/openid-configuration -HKNG_OIDC_CLIENT_ID=example-client -HKNG_OIDC_SECRET=example-secret +INVENTREE_GUNICORN_TIMEOUT=90 From 67e437323bff6a2f48052eb07ea8f17e631998e3 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:22 +0200 Subject: [PATCH 23/24] upgrade postgres to 17 --- compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yml b/compose.yml index f5af5c6..ff4f7a4 100644 --- a/compose.yml +++ b/compose.yml @@ -42,7 +42,7 @@ services: # Database service # Use PostgreSQL as the database backend inventree-db: - image: postgres:13 + image: postgres:17 container_name: inventree-db expose: - ${INVENTREE_DB_PORT:-5432}/tcp From 57f5b22ab127a67a77e4c42f0a4f8958631fd4c4 Mon Sep 17 00:00:00 2001 From: Philipp Date: Wed, 17 Sep 2025 15:26:23 +0200 Subject: [PATCH 24/24] switch to alpine version of postgres --- compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compose.yml b/compose.yml index ff4f7a4..8f80693 100644 --- 
a/compose.yml +++ b/compose.yml @@ -42,7 +42,7 @@ services: # Database service # Use PostgreSQL as the database backend inventree-db: - image: postgres:17 + image: postgres:17-alpine container_name: inventree-db expose: - ${INVENTREE_DB_PORT:-5432}/tcp