Streamline setup process and init code

Patrick Jentsch 2024-09-25 10:45:53 +02:00
parent 81c6f32a35
commit c7ca674b2f
8 changed files with 165 additions and 202 deletions

.gitignore (2 changes)

@@ -2,8 +2,6 @@
 app/static/gen/
 volumes/
 docker-compose.override.yml
-logs/
-!logs/dummy
 *.env
 *.pjentsch-testing


@@ -46,7 +46,6 @@ COPY --chown=nopaque:nopaque app app
 COPY --chown=nopaque:nopaque migrations migrations
 COPY --chown=nopaque:nopaque tests tests
 COPY --chown=nopaque:nopaque boot.sh config.py wsgi.py ./
-RUN mkdir logs
 EXPOSE 5000


@@ -35,7 +35,7 @@ username@hostname:~$ sudo mount --types cifs --options gid=${USER},password=nopa
 # Clone the nopaque repository
 username@hostname:~$ git clone https://gitlab.ub.uni-bielefeld.de/sfb1288inf/nopaque.git
 # Create data directories
-username@hostname:~$ mkdir data/{db,logs,mq}
+username@hostname:~$ mkdir volumes/{db,mq}
 username@hostname:~$ cp db.env.tpl db.env
 username@hostname:~$ cp .env.tpl .env
 # Fill out the variables within these files.


@@ -14,10 +14,11 @@ from flask_sqlalchemy import SQLAlchemy
 from flask_hashids import Hashids
+docker_client = DockerClient.from_env()
 apifairy = APIFairy()
 assets = Environment()
 db = SQLAlchemy()
-docker_client = DockerClient()
 hashids = Hashids()
 login = LoginManager()
 ma = Marshmallow()
@@ -28,80 +29,129 @@ scheduler = APScheduler()
 socketio = SocketIO()
-# TODO: Create export for lemmatized corpora
 def create_app(config: Config = Config) -> Flask:
-    ''' Creates an initialized Flask (WSGI Application) object. '''
+    ''' Creates an initialized Flask object. '''
     app = Flask(__name__)
     app.config.from_object(config)
-    config.init_app(app)
+    _configure_logging(app)
+    _configure_middlewares(app)
+    _init_docker_client(app)
+    _init_extensions(app)
+    _register_blueprints(app)
+    _register_socketio_namespaces(app)
+    _register_db_event_listeners(app)
+    return app
+def _configure_logging(app: Flask):
+    from flask.logging import default_handler
+    from logging import Formatter, StreamHandler
+    log_date_format: str = app.config['NOPAQUE_LOG_DATE_FORMAT']
+    log_format: str = app.config['NOPAQUE_LOG_FORMAT']
+    log_level: str = app.config['NOPAQUE_LOG_LEVEL']
+    formatter = Formatter(fmt=log_format, datefmt=log_date_format)
+    handler = StreamHandler()
+    handler.setFormatter(formatter)
+    handler.setLevel(log_level)
+    app.logger.removeHandler(default_handler)
+    app.logger.addHandler(handler)
+def _configure_middlewares(app: Flask):
+    from werkzeug.middleware.proxy_fix import ProxyFix
+    if app.config['NOPAQUE_PROXY_FIX_ENABLED']:
+        app.wsgi_app = ProxyFix(
+            app.wsgi_app,
+            x_for=app.config['NOPAQUE_PROXY_FIX_X_FOR'],
+            x_host=app.config['NOPAQUE_PROXY_FIX_X_HOST'],
+            x_port=app.config['NOPAQUE_PROXY_FIX_X_PORT'],
+            x_prefix=app.config['NOPAQUE_PROXY_FIX_X_PREFIX'],
+            x_proto=app.config['NOPAQUE_PROXY_FIX_X_PROTO']
+        )
+def _init_docker_client(app: Flask):
+    registry: str = app.config['NOPAQUE_DOCKER_REGISTRY']
+    username: str = app.config['NOPAQUE_DOCKER_REGISTRY_USERNAME']
+    password: str = app.config['NOPAQUE_DOCKER_REGISTRY_PASSWORD']
     docker_client.login(
-        app.config['NOPAQUE_DOCKER_REGISTRY_USERNAME'],
-        password=app.config['NOPAQUE_DOCKER_REGISTRY_PASSWORD'],
-        registry=app.config['NOPAQUE_DOCKER_REGISTRY']
+        username,
+        password=password,
+        registry=registry
     )
+def _init_extensions(app: Flask):
+    from typing import Callable
+    from .daemon import daemon
+    from .models import AnonymousUser, User
+    is_primary_instance: bool = app.config['NOPAQUE_IS_PRIMARY_INSTANCE']
+    socketio_message_queue_uri: str = app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI']
+    login_user_loader_callback: Callable[[int], User | None] = lambda user_id: User.query.get(int(user_id))
     apifairy.init_app(app)
     assets.init_app(app)
     db.init_app(app)
     hashids.init_app(app)
     login.init_app(app)
+    login.anonymous_user = AnonymousUser
+    login.login_view = 'auth.login'
+    login.user_loader(login_user_loader_callback)
     ma.init_app(app)
     mail.init_app(app)
     migrate.init_app(app, db)
     paranoid.init_app(app)
-    scheduler.init_app(app)
-    socketio.init_app(app, message_queue=app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI']) # noqa
-    from .models import AnonymousUser, User
-    login.anonymous_user = AnonymousUser
-    login.login_view = 'auth.login'
-    @login.user_loader
-    def load_user(user_id):
-        return User.query.get(int(user_id))
     paranoid.redirect_view = '/'
+    scheduler.init_app(app)
+    if is_primary_instance:
+        scheduler.add_job('daemon', daemon, args=(app,), seconds=3, trigger='interval')
+    socketio.init_app(app, message_queue=socketio_message_queue_uri)
-    from .models.event_listeners import register_event_listeners
-    register_event_listeners()
+def _register_blueprints(app: Flask):
     from .admin import bp as admin_blueprint
+    app.register_blueprint(admin_blueprint, url_prefix='/admin')
     from .api import bp as api_blueprint
+    app.register_blueprint(api_blueprint, url_prefix='/api')
     from .auth import bp as auth_blueprint
+    app.register_blueprint(auth_blueprint)
     from .contributions import bp as contributions_blueprint
+    app.register_blueprint(contributions_blueprint, url_prefix='/contributions')
     from .corpora import bp as corpora_blueprint
-    from .corpora.cqi_over_sio import CQiNamespace
-    socketio.on_namespace(CQiNamespace('/cqi_over_sio'))
+    app.register_blueprint(corpora_blueprint, cli_group='corpus', url_prefix='/corpora')
     from .errors import bp as errors_bp
+    app.register_blueprint(errors_bp)
     from .jobs import bp as jobs_blueprint
+    app.register_blueprint(jobs_blueprint, url_prefix='/jobs')
     from .main import bp as main_blueprint
+    app.register_blueprint(main_blueprint, cli_group=None)
     from .services import bp as services_blueprint
+    app.register_blueprint(services_blueprint, url_prefix='/services')
     from .settings import bp as settings_blueprint
+    app.register_blueprint(settings_blueprint, url_prefix='/settings')
     from .users import bp as users_blueprint
+    app.register_blueprint(users_blueprint, cli_group='user', url_prefix='/users')
     from .workshops import bp as workshops_blueprint
-    app.register_blueprint(admin_blueprint, url_prefix='/admin')
-    app.register_blueprint(api_blueprint, url_prefix='/api')
-    app.register_blueprint(auth_blueprint)
-    app.register_blueprint(contributions_blueprint, url_prefix='/contributions')
-    app.register_blueprint(corpora_blueprint, cli_group='corpus', url_prefix='/corpora')
-    app.register_blueprint(errors_bp)
-    app.register_blueprint(jobs_blueprint, url_prefix='/jobs')
-    app.register_blueprint(main_blueprint, cli_group=None)
-    app.register_blueprint(services_blueprint, url_prefix='/services')
-    app.register_blueprint(settings_blueprint, url_prefix='/settings')
-    app.register_blueprint(users_blueprint, cli_group='user', url_prefix='/users')
     app.register_blueprint(workshops_blueprint, url_prefix='/workshops')
-    return app
+def _register_socketio_namespaces(app: Flask):
+    from .corpora.cqi_over_sio import CQiOverSocketIO
+    socketio.on_namespace(CQiOverSocketIO('/cqi_over_sio'))
+def _register_db_event_listeners(app: Flask):
+    from .models.event_listeners import register_event_listeners
+    register_event_listeners()
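For orientation, the refactored factory is consumed in the usual Flask-SocketIO way; the actual wsgi.py is not part of this diff, so the following entry-point snippet is only a sketch under that assumption (the port matches the Dockerfile's EXPOSE 5000).

# Hypothetical entry point; the real wsgi.py is not shown in this commit.
from app import create_app, socketio

app = create_app()

if __name__ == '__main__':
    # Flask-SocketIO's run() serves HTTP and WebSocket traffic together.
    socketio.run(app, host='0.0.0.0', port=5000)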


@@ -19,7 +19,7 @@ This package tunnels the Corpus Query interface (CQi) protocol through
 Socket.IO (SIO) by tunneling CQi API calls through an event called "exec".
 Basic concept:
-1. A client connects to the "/cqi_over_sio" namespace.
+1. A client connects to the namespace.
 2. The client emits the "init" event and provides a corpus id for the corpus
    that should be analysed in this session.
    1.1 The analysis session counter of the corpus is incremented.
@@ -28,14 +28,13 @@ Basic concept:
 1.4 Connect the CQiClient to the server.
 1.5 Save the CQiClient, the Lock and the corpus id in the session for
     subsequential use.
-2. The client emits the "exec" event provides the name of a CQi API function
-   arguments (optional).
-   - The event "exec" handler will execute the function, make sure that the
-     result is serializable and returns the result back to the client.
-4. Wait for more events
-5. The client disconnects from the "/cqi_over_sio" namespace
-   1.1 The analysis session counter of the corpus is decremented.
-   1.2 The CQiClient and (Mutex) Lock belonging to it are teared down.
+3. The client emits "exec" events, within which it provides the name of a CQi
+   API function and the corresponding arguments.
+   3.1 The "exec" event handler will execute the function, make sure that
+       the result is serializable and returns the result back to the client.
+4. The client disconnects from the namespace
+   4.1 The analysis session counter of the corpus is decremented.
+   4.2 The CQiClient and (Mutex) Lock belonging to it are teared down.
 '''
 CQI_API_FUNCTION_NAMES: List[str] = [
@@ -86,7 +85,7 @@ CQI_API_FUNCTION_NAMES: List[str] = [
 ]
-class CQiNamespace(Namespace):
+class CQiOverSocketIO(Namespace):
     @socketio_login_required
     def on_connect(self):
         pass
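To make the tunneling protocol described in the docstring above concrete, here is a rough client-side sketch using the python-socketio package. Only the namespace and the "init"/"exec" event names come from the documentation; the payload keys, the CQi function name and the server URL are assumptions, and authentication (the handlers are guarded by socketio_login_required) is omitted.

# Hypothetical client sketch for the CQi-over-Socket.IO protocol; payload
# keys, the URL and the function name are illustrative assumptions.
import socketio

sio = socketio.Client()
sio.connect('https://nopaque.example.org', namespaces=['/cqi_over_sio'])

# Step 2: start an analysis session for one corpus (id value is made up).
sio.call('init', {'corpus_id': 'abc123'}, namespace='/cqi_over_sio')

# Step 3: call CQi API functions by name; the serialized result comes back
# as the Socket.IO acknowledgement.
result = sio.call(
    'exec',
    {'fn_name': 'ctrl_ping', 'fn_args': {}},
    namespace='/cqi_over_sio'
)
print(result)

# Step 4: disconnecting tears down the CQiClient and its lock server-side.
sio.disconnect()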


@@ -1,5 +1,5 @@
-from app import db
 from flask import Flask
+from app import db
 from .corpus_utils import check_corpora
 from .job_utils import check_jobs
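The daemon module itself is untouched apart from the import order. Judging only from the imports above and from the new scheduler.add_job('daemon', daemon, args=(app,), seconds=3, trigger='interval') call in app/__init__.py, its entry point presumably looks roughly like this sketch; the function body is an assumption, not part of the commit.

# Hypothetical shape of the scheduled daemon entry point.
def daemon(app: Flask):
    # Scheduler jobs run outside a request, so an application context is
    # needed for database access through the imported db session.
    with app.app_context():
        check_corpora()
        check_jobs()
        db.session.commit()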

config.py (179 changes)

@@ -1,47 +1,54 @@
 from dotenv import load_dotenv
-from flask import Flask
-from logging.handlers import RotatingFileHandler
 from pathlib import Path
-from werkzeug.middleware.proxy_fix import ProxyFix
-import logging
 import os
-basedir = os.path.abspath(os.path.dirname(__file__))
-load_dotenv(os.path.join(basedir, 'nopaque.env'))
+BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+ENV_FILE = os.path.join(BASE_DIR, 'nopaque.env')
+if os.path.isfile(ENV_FILE):
+    load_dotenv(ENV_FILE)
 class Config:
-    ''' APIFairy '''
+    ''' Configuration class for the Flask application. '''
+    # region APIFairy
     APIFAIRY_TITLE = 'nopaque'
     APIFAIRY_VERSION = '0.0.1'
-    APIFAIRY_UI = 'swagger_ui'
     APIFAIRY_APISPEC_PATH = '/api/apispec.json'
-    APIFAIRY_UI_PATH = '/api'
+    APIFAIRY_UI = 'swagger_ui'
+    APIFAIRY_UI_PATH = '/api/docs'
+    # endregion APIFairy
-    ''' # Flask # '''
-    APPLICATION_ROOT = os.environ.get('APPLICATION_ROOT', '/')
+    # region Flask
+    DEBUG = os.environ.get('FLASK_DEBUG', 'false').lower() == 'true'
     PREFERRED_URL_SCHEME = os.environ.get('PREFERRED_URL_SCHEME', 'http')
     SECRET_KEY = os.environ.get('SECRET_KEY', 'hard to guess string')
     SERVER_NAME = os.environ.get('SERVER_NAME', 'localhost:5000')
-    SESSION_COOKIE_SECURE = \
-        os.environ.get('SESSION_COOKIE_SECURE', 'false').lower() == 'true'
+    SESSION_COOKIE_SECURE = os.environ.get('SESSION_COOKIE_SECURE', 'false').lower() == 'true'
+    # endregion Flask
-    ''' # Flask-APScheduler # '''
-    JOBS = []
-    ''' # Flask-Assets '''
+    # region Flask-Assets
     ASSETS_DEBUG = os.environ.get('ASSETS_DEBUG', 'false').lower() == 'true'
+    # endregion Flask-Assets
-    ''' # Flask-Hashids '''
+    # region Flask-Hashids
     HASHIDS_MIN_LENGTH = int(os.environ.get('HASHIDS_MIN_LENGTH', '16'))
     HASHIDS_SALT = os.environ.get('HASHIDS_SALT', 'hard to guess string')
+    # endregion Flask-Hashids
-    ''' # Flask-Login # '''
-    REMEMBER_COOKIE_SECURE = \
-        os.environ.get('REMEMBER_COOKIE_SECURE', 'false').lower() == 'true'
-    ''' # Flask-Mail # '''
+    # region Flask-Login
+    REMEMBER_COOKIE_SECURE = os.environ.get('REMEMBER_COOKIE_SECURE', 'false').lower() == 'true'
+    # endregion Flask-Login
+    # region Flask-Mail
     MAIL_DEFAULT_SENDER = os.environ.get('MAIL_DEFAULT_SENDER')
     MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
     MAIL_SERVER = os.environ.get('MAIL_SERVER')
@@ -49,125 +56,53 @@ class Config:
     MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
     MAIL_USE_SSL = os.environ.get('MAIL_USE_SSL', 'false').lower() == 'true'
     MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS', 'false').lower() == 'true'
+    # endregion Flask-Mail
-    ''' # Flask-SQLAlchemy # '''
-    SQLALCHEMY_DATABASE_URI = \
-        os.environ.get('SQLALCHEMY_DATABASE_URI') \
-        or f'sqlite:///{os.path.join(basedir, "data.sqlite")}'
+    # region Flask-SQLAlchemy
+    SQLALCHEMY_DATABASE_URI = os.environ.get(
+        'SQLALCHEMY_DATABASE_URI',
+        f'sqlite:///{BASE_DIR}/data.sqlite'
+    )
     SQLALCHEMY_RECORD_QUERIES = True
     SQLALCHEMY_TRACK_MODIFICATIONS = False
+    # endregion Flask-SQLAlchemy
-    ''' # nopaque # '''
+    # region nopaque
     NOPAQUE_ADMIN = os.environ.get('NOPAQUE_ADMIN')
     NOPAQUE_DATA_DIR = Path(os.environ.get('NOPAQUE_DATA_PATH', '/mnt/nopaque'))
-    NOPAQUE_IS_PRIMARY_INSTANCE = \
-        os.environ.get('NOPAQUE_IS_PRIMARY_INSTANCE', 'true').lower() == 'true'
+    NOPAQUE_IS_PRIMARY_INSTANCE = os.environ.get('NOPAQUE_IS_PRIMARY_INSTANCE', 'true').lower() == 'true'
     NOPAQUE_MAIL_SUBJECT_PREFIX = '[nopaque]'
     NOPAQUE_SERVICE_DESK = 'gitlab-ub-incoming+sfb1288inf-nopaque-1324-issue-@jura.uni-bielefeld.de' # noqa
-    NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI = \
-        os.environ.get('NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI')
+    NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI = os.environ.get('NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI')
+    NOPAQUE_JOB_EXPIRATION_ENABLED = os.environ.get('NOPAQUE_JOB_EXPIRATION_ENABLED', 'true').lower() == 'true'
+    NOPAQUE_JOB_EXPIRATION_TIME = int(os.environ.get('NOPAQUE_JOB_EXPIRATION_TIME', '120'))
     NOPAQUE_DOCKER_REGISTRY = 'gitlab.ub.uni-bielefeld.de:4567'
     NOPAQUE_DOCKER_IMAGE_PREFIX = f'{NOPAQUE_DOCKER_REGISTRY}/sfb1288inf/'
-    NOPAQUE_DOCKER_NETWORK_NAME = \
-        os.environ.get('DOCKER_NETWORK_NAME', 'nopaque')
-    NOPAQUE_DOCKER_REGISTRY_USERNAME = \
-        os.environ.get('NOPAQUE_DOCKER_REGISTRY_USERNAME')
-    NOPAQUE_DOCKER_REGISTRY_PASSWORD = \
-        os.environ.get('NOPAQUE_DOCKER_REGISTRY_PASSWORD')
+    NOPAQUE_DOCKER_NETWORK_NAME = os.environ.get('DOCKER_NETWORK_NAME', 'nopaque')
+    NOPAQUE_DOCKER_REGISTRY_USERNAME = os.environ.get('NOPAQUE_DOCKER_REGISTRY_USERNAME')
+    NOPAQUE_DOCKER_REGISTRY_PASSWORD = os.environ.get('NOPAQUE_DOCKER_REGISTRY_PASSWORD')
-    NOPAQUE_LOG_DATE_FORMAT = \
-        os.environ.get('NOPAQUE_LOG_DATE_FORMAT', '%Y-%m-%d %H:%M:%S')
+    NOPAQUE_LOG_DATE_FORMAT = os.environ.get(
+        'NOPAQUE_LOG_DATE_FORMAT',
+        '%Y-%m-%d %H:%M:%S'
+    )
     NOPAQUE_LOG_FORMAT = os.environ.get(
         'NOPAQUE_LOG_DATE_FORMAT',
-        '[%(asctime)s] %(levelname)s in '
-        '%(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s'
+        '[%(asctime)s] %(levelname)s in %(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s'
     )
-    NOPAQUE_LOG_FILE_ENABLED = \
-        os.environ.get('NOPAQUE_LOG_FILE_ENABLED', 'false').lower() == 'true'
-    NOPAQUE_LOG_FILE_DIR = Path(os.environ.get('NOPAQUE_LOG_FILE_DIR', '/var/log/nopaque'))
-    NOPAQUE_LOG_FILE_LEVEL = \
-        os.environ.get('NOPAQUE_LOG_FILE_LEVEL', None)
-    NOPAQUE_LOG_STDERR_ENABLED = \
-        os.environ.get('NOPAQUE_LOG_STDERR_ENABLED', 'true').lower() == 'true'
-    NOPAQUE_LOG_STDERR_LEVEL = \
-        os.environ.get('NOPAQUE_LOG_STDERR_LEVEL', None)
+    NOPAQUE_LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', 'WARNING')
-    NOPAQUE_PROXY_FIX_ENABLED = \
-        os.environ.get('NOPAQUE_PROXY_FIX_ENABLED', 'false').lower() == 'true'
-    NOPAQUE_PROXY_FIX_X_FOR = \
-        int(os.environ.get('NOPAQUE_PROXY_FIX_X_FOR', '0'))
-    NOPAQUE_PROXY_FIX_X_HOST = \
-        int(os.environ.get('NOPAQUE_PROXY_FIX_X_HOST', '0'))
-    NOPAQUE_PROXY_FIX_X_PORT = \
-        int(os.environ.get('NOPAQUE_PROXY_FIX_X_PORT', '0'))
-    NOPAQUE_PROXY_FIX_X_PREFIX = \
-        int(os.environ.get('NOPAQUE_PROXY_FIX_X_PREFIX', '0'))
-    NOPAQUE_PROXY_FIX_X_PROTO = \
-        int(os.environ.get('NOPAQUE_PROXY_FIX_X_PROTO', '0'))
+    NOPAQUE_PROXY_FIX_ENABLED = os.environ.get('NOPAQUE_PROXY_FIX_ENABLED', 'false').lower() == 'true'
+    NOPAQUE_PROXY_FIX_X_FOR = int(os.environ.get('NOPAQUE_PROXY_FIX_X_FOR', '0'))
+    NOPAQUE_PROXY_FIX_X_HOST = int(os.environ.get('NOPAQUE_PROXY_FIX_X_HOST', '0'))
+    NOPAQUE_PROXY_FIX_X_PORT = int(os.environ.get('NOPAQUE_PROXY_FIX_X_PORT', '0'))
+    NOPAQUE_PROXY_FIX_X_PREFIX = int(os.environ.get('NOPAQUE_PROXY_FIX_X_PREFIX', '0'))
+    NOPAQUE_PROXY_FIX_X_PROTO = int(os.environ.get('NOPAQUE_PROXY_FIX_X_PROTO', '0'))
-    NOPAQUE_TRANSKRIBUS_ENABLED = \
-        os.environ.get('NOPAQUE_TRANSKRIBUS_ENABLED', 'false').lower() == 'true'
+    NOPAQUE_TRANSKRIBUS_ENABLED = os.environ.get('NOPAQUE_TRANSKRIBUS_ENABLED', 'false').lower() == 'true'
     NOPAQUE_READCOOP_USERNAME = os.environ.get('NOPAQUE_READCOOP_USERNAME')
     NOPAQUE_READCOOP_PASSWORD = os.environ.get('NOPAQUE_READCOOP_PASSWORD')
     NOPAQUE_VERSION='1.0.2'
+    # endregion nopaque
-    @staticmethod
-    def init_app(app: Flask):
-        for handler in app.logger.handlers:
-            app.logger.removeHandler(handler)
-        log_formatter = logging.Formatter(
-            fmt=app.config['NOPAQUE_LOG_FORMAT'],
-            datefmt=app.config['NOPAQUE_LOG_DATE_FORMAT']
-        )
-        if app.config['NOPAQUE_LOG_STDERR_ENABLED']:
-            log_stderr_level: str | None = app.config['NOPAQUE_LOG_STDERR_LEVEL']
-            stream_handler = logging.StreamHandler()
-            stream_handler.setFormatter(log_formatter)
-            if log_stderr_level is not None:
-                stream_handler.setLevel(log_stderr_level)
-            app.logger.addHandler(stream_handler)
-        if app.config['NOPAQUE_LOG_FILE_ENABLED']:
-            log_file_dir: Path = app.config['NOPAQUE_LOG_FILE_DIR']
-            log_file_level: str | None = app.config['NOPAQUE_LOG_FILE_LEVEL']
-            if not log_file_dir.exists():
-                log_file_dir.mkdir()
-            rotating_file_handler = RotatingFileHandler(
-                log_file_dir / 'nopaque.log',
-                maxBytes=10_240,
-                backupCount=10
-            )
-            rotating_file_handler.setFormatter(log_formatter)
-            if log_file_level is not None:
-                rotating_file_handler.setLevel(log_file_level)
-            app.logger.addHandler(rotating_file_handler)
-        if app.config['NOPAQUE_PROXY_FIX_ENABLED']:
-            # Set up and apply the ProxyFix middleware according to the
-            # corresponding (NOPAQUE_PROXY_FIX_*) configurations
-            app.wsgi_app = ProxyFix(
-                app.wsgi_app,
-                x_for=app.config['NOPAQUE_PROXY_FIX_X_FOR'],
-                x_host=app.config['NOPAQUE_PROXY_FIX_X_HOST'],
-                x_port=app.config['NOPAQUE_PROXY_FIX_X_PORT'],
-                x_prefix=app.config['NOPAQUE_PROXY_FIX_X_PREFIX'],
-                x_proto=app.config['NOPAQUE_PROXY_FIX_X_PROTO']
-            )
-        if app.config['NOPAQUE_IS_PRIMARY_INSTANCE']:
-            app.config['JOBS'].append(
-                {
-                    "id": "daemon",
-                    "func": "app.daemon:daemon",
-                    "args": (app,),
-                    "trigger": "interval",
-                    "seconds": 3,
-                }
-            )
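With init_app() gone, per-environment adjustments now go through the config parameter of create_app(config: Config = Config). A possible override looks like the sketch below; TestConfig and its attribute values are illustrative and do not exist in the repository.

# Hypothetical configuration override; only Config and create_app are real.
from app import create_app
from config import Config

class TestConfig(Config):
    SQLALCHEMY_DATABASE_URI = 'sqlite://'  # in-memory database
    NOPAQUE_PROXY_FIX_ENABLED = False

app = create_app(config=TestConfig)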


@@ -7,9 +7,6 @@
 # Flask #
 # https://flask.palletsprojects.com/en/1.1.x/config/ #
 ##############################################################################
-# DEFAULT: /
-# APPLICATION_ROOT=
 # CHOOSE ONE: http, https
 # DEFAULT: http
 # PREFERRED_URL_SCHEME=
@@ -141,24 +138,9 @@ NOPAQUE_DOCKER_REGISTRY_PASSWORD=
 # DEFAULT: [%(asctime)s] %(levelname)s in %(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s
 # NOPAQUE_LOG_FORMAT=
-# CHOOSE ONE: False, True
-# DEFAULT: False
-# NOPAQUE_LOG_FILE_ENABLED=
-# DEFAULT: /var/log/nopaque
-# NOPAQUE_LOG_FILE_DIR=
 # DEFAULT: DEBUG if FLASK_DEBUG == True else WARNING
 # CHOOSE ONE: CRITICAL, ERROR, WARNING, INFO, DEBUG
-# NOPAQUE_LOG_FILE_LEVEL=
+# NOPAQUE_LOG_LEVEL=
-# CHOOSE ONE: False, True
-# DEFAULT: True
-# NOPAQUE_LOG_STDERR_ENABLED=
-# DEFAULT: DEBUG if FLASK_DEBUG == True else WARNING
-# CHOOSE ONE: CRITICAL, ERROR, WARNING, INFO, DEBUG
-# NOPAQUE_LOG_STDERR_LEVEL=
 # CHOOSE ONE: False, True
 # DEFAULT: False