Compare commits

...

3 Commits

Author SHA1 Message Date
Patrick Jentsch 5fc3015bf1 rename functions to indicate that they should not be imported directly 2024-09-26 15:34:52 +02:00
Patrick Jentsch 5f05cedf5e Make the "daemon" (now tasks) more understandable 2024-09-26 15:33:32 +02:00
Patrick Jentsch aabea234fe More simplification 2024-09-26 14:45:05 +02:00
7 changed files with 85 additions and 119 deletions

View File

@@ -2,6 +2,7 @@ from apifairy import APIFairy
from config import Config
from docker import DockerClient
from flask import Flask
from flask.logging import default_handler
from flask_apscheduler import APScheduler
from flask_assets import Environment
from flask_login import LoginManager
@@ -12,11 +13,12 @@ from flask_paranoid import Paranoid
from flask_socketio import SocketIO
from flask_sqlalchemy import SQLAlchemy
from flask_hashids import Hashids
from logging import Formatter, StreamHandler
from werkzeug.middleware.proxy_fix import ProxyFix
docker_client = DockerClient.from_env()
apifairy = APIFairy()
assets = Environment()
db = SQLAlchemy()
@@ -36,82 +38,42 @@ def create_app(config: Config = Config) -> Flask:
app = Flask(__name__)
app.config.from_object(config)
_configure_logging(app)
_configure_middlewares(app)
_init_docker_client(app)
_init_extensions(app)
_register_blueprints(app)
_register_socketio_namespaces(app)
_register_db_event_listeners(app)
@app.before_request
def log_headers():
from flask import request
print(request.__dict__)
return app
def _configure_logging(app: Flask):
from flask.logging import default_handler
from logging import Formatter, StreamHandler
log_date_format: str = app.config['NOPAQUE_LOG_DATE_FORMAT']
log_format: str = app.config['NOPAQUE_LOG_FORMAT']
log_level: str = app.config['NOPAQUE_LOG_LEVEL']
formatter = Formatter(fmt=log_format, datefmt=log_date_format)
handler = StreamHandler()
handler.setFormatter(formatter)
handler.setLevel(log_level)
app.logger.removeHandler(default_handler)
app.logger.addHandler(handler)
def _configure_middlewares(app: Flask):
proxy_fix_enabled: bool = app.config['NOPAQUE_PROXY_FIX_ENABLED']
if proxy_fix_enabled:
from werkzeug.middleware.proxy_fix import ProxyFix
proxy_fix_x_for: int = app.config['NOPAQUE_PROXY_FIX_X_FOR']
proxy_fix_x_host: int = app.config['NOPAQUE_PROXY_FIX_X_HOST']
proxy_fix_x_port: int = app.config['NOPAQUE_PROXY_FIX_X_PORT']
proxy_fix_x_prefix: int = app.config['NOPAQUE_PROXY_FIX_X_PREFIX']
proxy_fix_x_proto: int = app.config['NOPAQUE_PROXY_FIX_X_PROTO']
app.wsgi_app = ProxyFix(
app.wsgi_app,
x_for=proxy_fix_x_for,
x_host=proxy_fix_x_host,
x_port=proxy_fix_x_port,
x_prefix=proxy_fix_x_prefix,
x_proto=proxy_fix_x_proto
)
def _init_docker_client(app: Flask):
registry: str = app.config['NOPAQUE_DOCKER_REGISTRY']
username: str = app.config['NOPAQUE_DOCKER_REGISTRY_USERNAME']
password: str = app.config['NOPAQUE_DOCKER_REGISTRY_PASSWORD']
docker_client.login(
username,
password=password,
registry=registry
# region Logging
log_formatter = Formatter(
fmt=app.config['NOPAQUE_LOG_FORMAT'],
datefmt=app.config['NOPAQUE_LOG_DATE_FORMAT']
)
log_handler = StreamHandler()
log_handler.setFormatter(log_formatter)
log_handler.setLevel(app.config['NOPAQUE_LOG_LEVEL'])
app.logger.setLevel('DEBUG')
app.logger.removeHandler(default_handler)
app.logger.addHandler(log_handler)
# endregion Logging
# region Middlewares
if app.config['NOPAQUE_PROXY_FIX_ENABLED']:
app.wsgi_app = ProxyFix(
app.wsgi_app,
x_for=app.config['NOPAQUE_PROXY_FIX_X_FOR'],
x_host=app.config['NOPAQUE_PROXY_FIX_X_HOST'],
x_port=app.config['NOPAQUE_PROXY_FIX_X_PORT'],
x_prefix=app.config['NOPAQUE_PROXY_FIX_X_PREFIX'],
x_proto=app.config['NOPAQUE_PROXY_FIX_X_PROTO']
)
# endregion Middlewares
# region Extensions
docker_client.login(
app.config['NOPAQUE_DOCKER_REGISTRY_USERNAME'],
password=app.config['NOPAQUE_DOCKER_REGISTRY_PASSWORD'],
registry=app.config['NOPAQUE_DOCKER_REGISTRY']
)
def _init_extensions(app: Flask):
from typing import Callable
from .daemon import daemon
from .models import AnonymousUser, User
is_primary_instance: bool = app.config['NOPAQUE_IS_PRIMARY_INSTANCE']
socketio_message_queue_uri: str = app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI']
login_user_loader_callback: Callable[[int], User | None] = lambda user_id: User.query.get(int(user_id))
apifairy.init_app(app)
assets.init_app(app)
db.init_app(app)
@@ -119,19 +81,17 @@ def _init_extensions(app: Flask):
login.init_app(app)
login.anonymous_user = AnonymousUser
login.login_view = 'auth.login'
login.user_loader(login_user_loader_callback)
login.user_loader(lambda user_id: User.query.get(int(user_id)))
ma.init_app(app)
mail.init_app(app)
migrate.init_app(app, db)
paranoid.init_app(app)
paranoid.redirect_view = '/'
scheduler.init_app(app)
if is_primary_instance:
scheduler.add_job('daemon', daemon, args=(app,), seconds=3, trigger='interval')
socketio.init_app(app, message_queue=socketio_message_queue_uri)
socketio.init_app(app, message_queue=app.config['NOPAQUE_SOCKETIO_MESSAGE_QUEUE_URI'])
# endregion Extensions
def _register_blueprints(app: Flask):
# region Blueprints
from .admin import bp as admin_blueprint
app.register_blueprint(admin_blueprint, url_prefix='/admin')
@@ -167,6 +127,28 @@ def _register_blueprints(app: Flask):
from .workshops import bp as workshops_blueprint
app.register_blueprint(workshops_blueprint, url_prefix='/workshops')
# endregion Blueprints
# region SocketIO Namespaces
from .corpora.cqi_over_sio import CQiOverSocketIO
socketio.on_namespace(CQiOverSocketIO('/cqi_over_sio'))
# endregion SocketIO Namespaces
# region Database event Listeners
from .models.event_listeners import register_event_listeners
register_event_listeners()
# endregion Database event Listeners
# region Add scheduler jobs
if app.config['NOPAQUE_IS_PRIMARY_INSTANCE']:
from .tasks import handle_corpora
scheduler.add_job('handle_corpora', handle_corpora, seconds=3, trigger='interval')
from .tasks import handle_jobs
scheduler.add_job('handle_jobs', handle_jobs, seconds=3, trigger='interval')
# endregion Add scheduler jobs
return app
# def _add_admin_views():
@@ -181,15 +163,3 @@ def _register_blueprints(app: Flask):
# if not issubclass(v, db.Model):
# continue
# admin.add_view(ModelView(v, db.session, category='Database'))
def _register_socketio_namespaces(app: Flask):
from .corpora.cqi_over_sio import CQiOverSocketIO
socketio.on_namespace(CQiOverSocketIO('/cqi_over_sio'))
def _register_db_event_listeners(app: Flask):
from .models.event_listeners import register_event_listeners
register_event_listeners()
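
Note: after this refactoring, create_app() itself wires up logging, middleware, extensions, blueprints, the SocketIO namespace, the database event listeners, and the two interval jobs. A minimal sketch of how those jobs end up running, assuming a conventional entry point that is not part of this changeset:

# Hypothetical wsgi.py-style entry point (illustrative only, not in this diff).
# create_app() already calls scheduler.init_app(app) and registers the
# handle_corpora/handle_jobs interval jobs; starting the scheduler is what
# makes them fire every 3 seconds.
from app import create_app, scheduler

app = create_app()
scheduler.start()

if __name__ == '__main__':
    app.run()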

View File

@@ -1,11 +0,0 @@
from flask import Flask
from app import db
from .corpus_utils import check_corpora
from .job_utils import check_jobs
def daemon(app: Flask):
with app.app_context():
check_corpora()
check_jobs()
db.session.commit()

app/tasks/__init__.py Normal file
View File

@@ -0,0 +1,2 @@
from .handle_corpora import task as handle_corpora
from .handle_jobs import task as handle_jobs
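
The package re-exports each module's task() under a descriptive name, in line with commit 5fc3015bf1: callers use handle_corpora and handle_jobs and never import the task functions directly. A short sketch of the consuming side, mirroring the create_app() hunk above (scheduler is the flask_apscheduler.APScheduler instance from app/__init__.py):

from app import scheduler
from app.tasks import handle_corpora, handle_jobs

# Register both tasks as 3-second interval jobs, exactly as create_app()
# does on the primary instance.
scheduler.add_job('handle_corpora', handle_corpora, seconds=3, trigger='interval')
scheduler.add_job('handle_jobs', handle_jobs, seconds=3, trigger='interval')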

View File

@@ -1,4 +1,4 @@
from app import docker_client
from app import db, docker_client, scheduler
from app.models import Corpus, CorpusStatus
from flask import current_app
import docker
@@ -6,7 +6,11 @@ import os
import shutil
def check_corpora():
def task():
with scheduler.app.app_context():
_handle_corpora()
def _handle_corpora():
corpora = Corpus.query.all()
for corpus in [x for x in corpora if x.status == CorpusStatus.SUBMITTED]:
_create_build_corpus_service(corpus)
@@ -22,6 +26,7 @@ def check_corpora():
_create_cqpserver_container(corpus)
for corpus in [x for x in corpora if x.status == CorpusStatus.CANCELING_ANALYSIS_SESSION]:
_remove_cqpserver_container(corpus)
db.session.commit()
def _create_build_corpus_service(corpus):
''' # Docker service settings # '''

View File

@@ -1,4 +1,4 @@
from app import db, docker_client, hashids
from app import db, docker_client, hashids, scheduler
from app.models import (
Job,
JobResult,
@@ -15,7 +15,11 @@ import os
import shutil
def check_jobs():
def task():
with scheduler.app.app_context():
_handle_jobs()
def _handle_jobs():
jobs = Job.query.all()
for job in [x for x in jobs if x.status == JobStatus.SUBMITTED]:
_create_job_service(job)
@@ -23,6 +27,7 @@ def check_jobs():
_checkout_job_service(job)
for job in [x for x in jobs if x.status == JobStatus.CANCELING]:
_remove_job_service(job)
db.session.commit()
def _create_job_service(job):
''' # Docker service settings # '''
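
Both task modules now share the same shape: a public task() opens an application context through scheduler.app (Flask-APScheduler keeps a reference to the app after init_app()), delegates to a private _handle_*() helper, and that helper commits the session once after processing all status transitions. A hypothetical third module following this convention might look like the sketch below (module and function names are illustrative, not part of the diff):

# Hypothetical app/tasks/handle_example.py (names are illustrative only).
from app import db, scheduler
from app.models import Job  # any model whose status drives the work


def task():
    # Tasks run outside a request, so build the app context from the scheduler's app.
    with scheduler.app.app_context():
        _handle_example()


def _handle_example():
    # Inspect model state, act on it, then persist all changes in one commit.
    for job in Job.query.all():
        pass  # react to job.status here
    db.session.commit()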

View File

@@ -15,7 +15,7 @@ class Config:
''' Configuration class for the Flask application. '''
# region APIFairy
APIFAIRY_TITLE = 'nopaque'
APIFAIRY_TITLE = 'nopaque API'
APIFAIRY_VERSION = '0.0.1'
APIFAIRY_APISPEC_PATH = '/api/apispec.json'
APIFAIRY_UI = 'swagger_ui'
@@ -60,10 +60,7 @@ class Config:
# region Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = os.environ.get(
'SQLALCHEMY_DATABASE_URI',
f'sqlite:///{BASE_DIR}/data.sqlite'
)
SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI', f'sqlite:///{BASE_DIR}/data.sqlite')
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
# endregion Flask-SQLAlchemy
@@ -83,14 +80,8 @@ class Config:
NOPAQUE_DOCKER_REGISTRY_USERNAME = os.environ.get('NOPAQUE_DOCKER_REGISTRY_USERNAME')
NOPAQUE_DOCKER_REGISTRY_PASSWORD = os.environ.get('NOPAQUE_DOCKER_REGISTRY_PASSWORD')
NOPAQUE_LOG_DATE_FORMAT = os.environ.get(
'NOPAQUE_LOG_DATE_FORMAT',
'%Y-%m-%d %H:%M:%S'
)
NOPAQUE_LOG_FORMAT = os.environ.get(
'NOPAQUE_LOG_DATE_FORMAT',
'[%(asctime)s] %(levelname)s in %(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s'
)
NOPAQUE_LOG_DATE_FORMAT = os.environ.get('NOPAQUE_LOG_DATE_FORMAT', '%Y-%m-%d %H:%M:%S')
NOPAQUE_LOG_FORMAT = os.environ.get('NOPAQUE_LOG_FORMAT','[%(asctime)s] %(levelname)s: %(message)s')
NOPAQUE_LOG_LEVEL = os.environ.get('NOPAQUE_LOG_LEVEL', 'WARNING')
NOPAQUE_PROXY_FIX_ENABLED = os.environ.get('NOPAQUE_PROXY_FIX_ENABLED', 'false').lower() == 'true'

View File

@@ -7,6 +7,10 @@
# Flask #
# https://flask.palletsprojects.com/en/1.1.x/config/ #
##############################################################################
# CHOOSE ONE: False, True
# DEFAULT: False
# FLASK_DEBUG=
# CHOOSE ONE: http, https
# DEFAULT: http
# PREFERRED_URL_SCHEME=
@@ -138,7 +142,7 @@ NOPAQUE_DOCKER_REGISTRY_PASSWORD=
# DEFAULT: [%(asctime)s] %(levelname)s in %(pathname)s (function: %(funcName)s, line: %(lineno)d): %(message)s
# NOPAQUE_LOG_FORMAT=
# DEFAULT: DEBUG if FLASK_DEBUG == True else WARNING
# DEFAULT: WARNING
# CHOOSE ONE: CRITICAL, ERROR, WARNING, INFO, DEBUG
# NOPAQUE_LOG_LEVEL=
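
Filled in, the logging block might look like this (example values for local development only; the format string matches the new default in config.py, and the level is one of the documented choices):

NOPAQUE_LOG_DATE_FORMAT=%Y-%m-%d %H:%M:%S
NOPAQUE_LOG_FORMAT=[%(asctime)s] %(levelname)s: %(message)s
NOPAQUE_LOG_LEVEL=INFO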