diff --git a/.drone.yml b/.drone.yml index d998013..1bbfe8e 100644 --- a/.drone.yml +++ b/.drone.yml @@ -10,4 +10,4 @@ pipeline: - docker push eu.gcr.io/avatao-challengestore/tutorial-framework:${DRONE_TAG} when: event: 'tag' - branch: refs/tags/ocicat-20* + branch: refs/tags/chausie-20* diff --git a/.git-hooks/apply_hooks.sh b/.git-hooks/apply_hooks.sh deleted file mode 100755 index f95f8f4..0000000 --- a/.git-hooks/apply_hooks.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -set -eu -set -o pipefail -set -o errtrace -shopt -s expand_aliases - -[ "$(uname)" == "Darwin" ] && alias readlink="greadlink" || : - -GREEN='\033[0;32m' -NC='\033[0m' - - -here="$(dirname "$(readlink -f "$0")")" -cd "${here}/../.git/hooks" - -rm -f pre-push pre-commit || : -prepush_script="../../.git-hooks/pre-push.sh" -precommit_script="../../.git-hooks/pre-commit.sh" -[ -f "${prepush_script}" ] && ln -s "${prepush_script}" pre-push -[ -f "${precommit_script}" ] && ln -s "${precommit_script}" pre-commit - -echo -e "\n${GREEN}Done! 
Hooks applied, you can start committing and pushing!${NC}\n" diff --git a/.git-hooks/pre-push.sh b/.git-hooks/pre-push.sh deleted file mode 100755 index e2025ac..0000000 --- a/.git-hooks/pre-push.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash -set -eu -set -o pipefail -set -o errtrace -shopt -s expand_aliases - -RED='\033[0;31m' -GREEN='\033[0;32m' -NC='\033[0m' - - -echo -e "Running pylint...\n" -if pylint lib; then - echo -e "\n${GREEN}Pylint found no errors!${NC}\n" -else - echo -e "\n${RED}Pylint failed with errors${NC}\n" - exit 1 -fi diff --git a/.pylintrc b/.pylintrc index fd195bc..7004a94 100644 --- a/.pylintrc +++ b/.pylintrc @@ -6,6 +6,7 @@ disable = missing-docstring, too-few-public-methods, invalid-name [SIMILARITIES] +min-similarity-lines=8 ignore-comments=yes ignore-docstrings=yes ignore-imports=yes diff --git a/Dockerfile b/Dockerfile index 22237e8..a0f1cd7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,30 +1,31 @@ +FROM avatao/frontend-tutorial-framework:chausie-20190912 as frontend FROM avatao/debian:buster -RUN curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash - &&\ - curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - &&\ - echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list &&\ - apt-get update &&\ +RUN apt-get update &&\ apt-get install -y --no-install-recommends \ - nodejs \ - yarn \ supervisor \ libzmq5 \ nginx \ jq \ - gettext-base &&\ - rm -rf /var/lib/apt/lists/* &&\ + gettext-base &&\ + rm -rf /var/lib/apt/lists/* &&\ ln -sf /bin/bash /bin/sh COPY requirements.txt /tmp RUN pip3 install -r /tmp/requirements.txt +RUN curl -Ls https://github.com/krallin/tini/releases/download/v0.18.0/tini-amd64 --output /bin/init &&\ + echo "12d20136605531b09a2c2dac02ccee85e1b874eb322ef6baf7561cd93f93c855 /bin/init" |\ + sha256sum --check --status &&\ + chmod 755 /bin/init + ENV TFW_PUBLIC_PORT=8888 \ TFW_WEB_PORT=4242 \ TFW_LOGIN_APP_PORT=6666 \ TFW_TERMINADO_PORT=7878 \ 
TFW_SUPERVISOR_HTTP_PORT=9001 \ - TFW_PUBLISHER_PORT=7654 \ - TFW_RECEIVER_PORT=8765 + TFW_PUB_PORT=7654 \ + TFW_PULL_PORT=8765 EXPOSE ${TFW_PUBLIC_PORT} @@ -39,29 +40,26 @@ ENV PYTHONPATH="/usr/local/lib" \ TFW_FRONTEND_DIR="/srv/frontend" \ TFW_DIR="/.tfw" \ TFW_SERVER_DIR="/.tfw/tfw_server" \ - TFW_SNAPSHOTS_DIR="/.tfw/snapshots" \ TFW_AUTH_KEY="/tmp/tfw-auth.key" \ + TFW_LOGS_DIR="/var/log/tfw" \ + TFW_PIPES_DIR="/run/tfw" \ + TFW_SNAPSHOTS_DIR="/tmp/tfw-snapshots" \ TFW_HISTFILE="/home/${AVATAO_USER}/.bash_history" \ PROMPT_COMMAND="history -a" -COPY bashrc /tmp -RUN echo "export HISTFILE=${TFW_HISTFILE}" >> /tmp/bashrc &&\ - cat /tmp/bashrc >> /home/${AVATAO_USER}/.bashrc - +COPY bashrc supervisor/tfw_init.sh /tmp/ COPY supervisor/supervisord.conf ${TFW_SUPERVISORD_CONF} COPY supervisor/components/ ${TFW_SUPERVISORD_COMPONENTS} COPY nginx/nginx.conf ${TFW_NGINX_CONF} COPY nginx/default.conf ${TFW_NGINX_DEFAULT} COPY nginx/components/ ${TFW_NGINX_COMPONENTS} -COPY lib LICENSE ${TFW_LIB_DIR}/ +COPY tfw ${TFW_LIB_DIR}/tfw COPY supervisor/tfw_server.py ${TFW_SERVER_DIR}/ +COPY --from=frontend /srv/dist ${TFW_FRONTEND_DIR} -RUN for dir in "${TFW_LIB_DIR}"/{tfw,tao,envvars} "/etc/nginx" "/etc/supervisor"; do \ - chown -R root:root "$dir" && chmod -R 700 "$dir"; \ - done +VOLUME ["${TFW_LOGS_DIR}", "${TFW_PIPES_DIR}"] ONBUILD ARG BUILD_CONTEXT="solvable" -ONBUILD ARG NOFRONTEND="" ONBUILD COPY ${BUILD_CONTEXT}/nginx/ ${TFW_NGINX_COMPONENTS} ONBUILD COPY ${BUILD_CONTEXT}/supervisor/ ${TFW_SUPERVISORD_COMPONENTS} @@ -69,11 +67,7 @@ ONBUILD COPY ${BUILD_CONTEXT}/supervisor/ ${TFW_SUPERVISORD_COMPONENTS} ONBUILD RUN for f in "${TFW_NGINX_DEFAULT}" ${TFW_NGINX_COMPONENTS}/*.conf; do \ envsubst "$(printenv | cut -d= -f1 | grep TFW_ | sed -e 's/^/$/g')" < $f > $f~ && mv $f~ $f ;\ done -ONBUILD VOLUME ["/etc/nginx", "/var/lib/nginx", "/var/log/nginx", "${TFW_LIB_DIR}/envvars", "${TFW_LIB_DIR}/tfw"] - -ONBUILD COPY ${BUILD_CONTEXT}/frontend /data/ -ONBUILD RUN test -z 
"${NOFRONTEND}" && cd /data && yarn install --frozen-lockfile || : -ONBUILD RUN test -z "${NOFRONTEND}" && cd /data && yarn build --no-progress || : -ONBUILD RUN test -z "${NOFRONTEND}" && mv /data/dist ${TFW_FRONTEND_DIR} && rm -rf /data || : +ONBUILD VOLUME ["/etc/nginx", "/var/lib/nginx", "/var/log/nginx", "${TFW_LIB_DIR}/tfw"] +ENTRYPOINT ["/bin/init", "--"] CMD exec supervisord --nodaemon --configuration ${TFW_SUPERVISORD_CONF} diff --git a/LICENSE b/LICENSE deleted file mode 100644 index be3bf3b..0000000 --- a/LICENSE +++ /dev/null @@ -1,12 +0,0 @@ -AVATAO CONFIDENTIAL - -Copyright (C) 2018 Avatao.com Innovative Learning Kft. -All Rights Reserved. - -All source code, configuration files and documentation contained herein -is, and remains the exclusive property of Avatao.com Innovative Learning Kft. -The intellectual and technical concepts contained herein are proprietary -to Avatao.com Innovative Learning Kft. and are protected by trade secret -or copyright law. Dissemination of this information or reproduction of -this material is strictly forbidden unless prior written permission is -obtained from Avatao.com Innovative Learning Kft. diff --git a/VERSION b/VERSION index 4ed6a03..63a1a7e 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -ocicat +chausie diff --git a/lib/tao/__init__.py b/lib/tao/__init__.py deleted file mode 100644 index db64b25..0000000 --- a/lib/tao/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. diff --git a/lib/tao/config/__init__.py b/lib/tao/config/__init__.py deleted file mode 100644 index 4876c01..0000000 --- a/lib/tao/config/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -from .envvars import TAOENV diff --git a/lib/tao/config/envvars.py b/lib/tao/config/envvars.py deleted file mode 100644 index 260d057..0000000 --- a/lib/tao/config/envvars.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from envvars import LazyEnvironment - -TAOENV = LazyEnvironment('AVATAO_', 'taoenvtuple').environment diff --git a/lib/tfw/__init__.py b/lib/tfw/__init__.py deleted file mode 100644 index db64b25..0000000 --- a/lib/tfw/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. diff --git a/lib/tfw/components/__init__.py b/lib/tfw/components/__init__.py deleted file mode 100644 index 7c3f76d..0000000 --- a/lib/tfw/components/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from .directory_monitoring_event_handler import DirectoryMonitoringEventHandler -from .process_managing_event_handler import ProcessManagingEventHandler -from .terminal_event_handler import TerminalEventHandler -from .ide_event_handler import IdeEventHandler -from .history_monitor import HistoryMonitor, BashMonitor, GDBMonitor -from .terminal_commands import TerminalCommands -from .log_monitoring_event_handler import LogMonitoringEventHandler -from .fsm_managing_event_handler import FSMManagingEventHandler -from .snapshot_provider import SnapshotProvider -from .pipe_io_event_handler import PipeIOEventHandlerBase, PipeIOEventHandler, PipeIOServer -from .pipe_io_event_handler import TransformerPipeIOEventHandler, CommandEventHandler -from .directory_snapshotting_event_handler import DirectorySnapshottingEventHandler -from .commands_equal import CommandsEqual diff --git a/lib/tfw/components/directory_monitor.py b/lib/tfw/components/directory_monitor.py deleted file mode 100644 index 
f65c8ff..0000000 --- a/lib/tfw/components/directory_monitor.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from functools import wraps - -from watchdog.events import FileSystemEventHandler as FileSystemWatchdogEventHandler - -from tfw.networking.event_handlers.server_connector import ServerUplinkConnector -from tfw.decorators.rate_limiter import RateLimiter -from tfw.mixins.observer_mixin import ObserverMixin - -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class DirectoryMonitor(ObserverMixin): - def __init__(self, ide_key, directories): - self.eventhandler = IdeReloadWatchdogEventHandler(ide_key) - for directory in directories: - self.observer.schedule(self.eventhandler, directory, recursive=True) - - self.pause, self.resume = self.eventhandler.pause, self.eventhandler.resume - - @property - def ignore(self): - return self.eventhandler.ignore - - @ignore.setter - def ignore(self, value): - self.eventhandler.ignore = value if value >= 0 else 0 - - @property - def pauser(self): - return DirectoryMonitor.Pauser(self) - - class Pauser: - def __init__(self, directory_monitor): - self.directorymonitor = directory_monitor - def __enter__(self): - self.directorymonitor.pause() - def __exit__(self, exc_type, exc_val, exc_tb): - self.directorymonitor.resume() - - -class IdeReloadWatchdogEventHandler(FileSystemWatchdogEventHandler): - def __init__(self, ide_key): - super().__init__() - self.ide_key = ide_key - self.uplink = ServerUplinkConnector() - self._paused = False - self.ignore = 0 - - def pause(self): - self._paused = True - - def resume(self): - self._paused = False - - @RateLimiter(rate_per_second=2) - def on_modified(self, event): - if self._paused: - return - if self.ignore > 0: - self.ignore = self.ignore - 1 - return - LOG.debug(event) - self.uplink.send({ - 'key': self.ide_key, - 'data': {'command': 'reload'} - }) - - -def 
with_monitor_paused(fun): - @wraps(fun) - def wrapper(self, *args, **kwargs): - if self.monitor: - with self.monitor.pauser: - return fun(self, *args, **kwargs) - return fun(self, *args, **kwargs) - return wrapper diff --git a/lib/tfw/components/directory_monitoring_event_handler.py b/lib/tfw/components/directory_monitoring_event_handler.py deleted file mode 100644 index 8d97022..0000000 --- a/lib/tfw/components/directory_monitoring_event_handler.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from os.path import isdir, exists - -from tfw.event_handler_base import EventHandlerBase -from tfw.mixins.monitor_manager_mixin import MonitorManagerMixin -from tfw.components.directory_monitor import DirectoryMonitor -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class DirectoryMonitoringEventHandler(EventHandlerBase, MonitorManagerMixin): - def __init__(self, key, directory): - super().__init__(key) - self._directory = directory - MonitorManagerMixin.__init__( - self, - DirectoryMonitor, - key, - self._directory - ) - - self.commands = { - 'pause': self.pause, - 'resume': self.resume, - 'ignore': self.ignore, - 'selectdir': self.selectdir - } - - @property - def directory(self): - return self._directory - - @directory.setter - def directory(self, directory): - if not exists(directory) or not isdir(directory): - raise EnvironmentError('No such directory!') - self._directory = directory - - def handle_event(self, message): - try: - message['data'] = self.commands[message['data']['command']](message['data']) - return message - except KeyError: - LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) - - def pause(self, data): - self.monitor.pause() - return data - - def resume(self, data): - self.monitor.resume() - return data - - def ignore(self, data): - self.monitor.ignore += data['ignore'] - return data - - def selectdir(self, 
data): - try: - self.directory = data['directory'] - self.reload_monitor() - return data - except EnvironmentError: - LOG.error('Failed to switch directory!') - - def cleanup(self): - self.monitor.stop() diff --git a/lib/tfw/components/fsm_managing_event_handler.py b/lib/tfw/components/fsm_managing_event_handler.py deleted file mode 100644 index 73ebe10..0000000 --- a/lib/tfw/components/fsm_managing_event_handler.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from tfw.event_handler_base import EventHandlerBase -from tfw.crypto import KeyManager, sign_message, verify_message -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class FSMManagingEventHandler(EventHandlerBase): - """ - EventHandler responsible for managing the state machine of - the framework (TFW FSM). - - tfw.networking.TFWServer instances automatically send 'trigger' - commands to the event handler listening on the 'fsm' key, - which should be an instance of this event handler. - - This event handler accepts messages that have a - data['command'] key specifying a command to be executed. - - An 'fsm_update' message is broadcasted after every successful - command. 
- """ - def __init__(self, key, fsm_type, require_signature=False): - super().__init__(key) - self.fsm = fsm_type() - self._fsm_updater = FSMUpdater(self.fsm) - self.auth_key = KeyManager().auth_key - self._require_signature = require_signature - - self.command_handlers = { - 'trigger': self.handle_trigger, - 'update': self.handle_update - } - - def handle_event(self, message): - try: - message = self.command_handlers[message['data']['command']](message) - if message: - fsm_update_message = self._fsm_updater.fsm_update - sign_message(self.auth_key, message) - sign_message(self.auth_key, fsm_update_message) - self.server_connector.broadcast(fsm_update_message) - return message - except KeyError: - LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) - - def handle_trigger(self, message): - """ - Attempts to step the FSM with the supplied trigger. - - :param message: TFW message with a data field containing - the action to try triggering in data['value'] - """ - trigger = message['data']['value'] - if self._require_signature: - if not verify_message(self.auth_key, message): - LOG.error('Ignoring unsigned trigger command: %s', message) - return None - if self.fsm.step(trigger): - return message - return None - - def handle_update(self, message): - """ - Does nothing, but triggers an 'fsm_update' message. 
- """ - # pylint: disable=no-self-use - return message - - -class FSMUpdater: - def __init__(self, fsm): - self.fsm = fsm - - @property - def fsm_update(self): - return { - 'key': 'fsm_update', - 'data': self.fsm_update_data - } - - @property - def fsm_update_data(self): - valid_transitions = [ - {'trigger': trigger} - for trigger in self.fsm.get_triggers(self.fsm.state) - ] - last_fsm_event = self.fsm.event_log[-1] - last_fsm_event['timestamp'] = last_fsm_event['timestamp'].isoformat() - return { - 'current_state': self.fsm.state, - 'valid_transitions': valid_transitions, - 'in_accepted_state': self.fsm.in_accepted_state, - 'last_event': last_fsm_event - } diff --git a/lib/tfw/components/ide_event_handler.py b/lib/tfw/components/ide_event_handler.py deleted file mode 100644 index 7e242e5..0000000 --- a/lib/tfw/components/ide_event_handler.py +++ /dev/null @@ -1,255 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from os.path import isfile, join, relpath, exists, isdir, realpath -from glob import glob -from fnmatch import fnmatchcase -from typing import Iterable - -from tfw.event_handler_base import EventHandlerBase -from tfw.mixins.monitor_manager_mixin import MonitorManagerMixin -from tfw.components.directory_monitor import DirectoryMonitor -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class FileManager: # pylint: disable=too-many-instance-attributes - def __init__(self, working_directory, allowed_directories, selected_file=None, exclude=None): - self._exclude, self.exclude = None, exclude - self._allowed_directories, self.allowed_directories = None, allowed_directories - self._workdir, self.workdir = None, working_directory - self._filename, self.filename = None, selected_file or self.files[0] - - @property - def exclude(self): - return self._exclude - - @exclude.setter - def exclude(self, exclude): - if exclude is None: - return - if not 
isinstance(exclude, Iterable): - raise TypeError('Exclude must be Iterable!') - self._exclude = exclude - - @property - def workdir(self): - return self._workdir - - @workdir.setter - def workdir(self, directory): - if not exists(directory) or not isdir(directory): - raise EnvironmentError(f'"{directory}" is not a directory!') - if not self._is_in_allowed_dir(directory): - raise EnvironmentError(f'Directory "{directory}" is not allowed!') - self._workdir = directory - - @property - def allowed_directories(self): - return self._allowed_directories - - @allowed_directories.setter - def allowed_directories(self, directories): - self._allowed_directories = directories - - @property - def filename(self): - return self._filename - - @filename.setter - def filename(self, filename): - if filename not in self.files: - raise EnvironmentError('No such file in workdir!') - self._filename = filename - - @property - def files(self): - return [ - self._relpath(file) - for file in glob(join(self._workdir, '**/*'), recursive=True) - if isfile(file) - and self._is_in_allowed_dir(file) - and not self._is_blacklisted(file) - ] - - @property - def file_contents(self): - with open(self._filepath(self.filename), 'r', errors='surrogateescape') as ifile: - return ifile.read() - - @file_contents.setter - def file_contents(self, value): - with open(self._filepath(self.filename), 'w', errors='surrogateescape') as ofile: - ofile.write(value) - - def _is_in_allowed_dir(self, path): - return any( - realpath(path).startswith(allowed_dir) - for allowed_dir in self.allowed_directories - ) - - def _is_blacklisted(self, file): - return any( - fnmatchcase(file, blacklisted) - for blacklisted in self.exclude - ) - - def _filepath(self, filename): - return join(self._workdir, filename) - - def _relpath(self, filename): - return relpath(self._filepath(filename), start=self._workdir) - - -class IdeEventHandler(EventHandlerBase, MonitorManagerMixin): - # pylint: 
disable=too-many-arguments,anomalous-backslash-in-string - """ - Event handler implementing the backend of our browser based IDE. - By default all files in the directory specified in __init__ are displayed - on the fontend. Note that this is a stateful component. - - When any file in the selected directory changes they are automatically refreshed - on the frontend (this is done by listening to inotify events). - - This EventHandler accepts messages that have a data['command'] key specifying - a command to be executed. - - The API of each command is documented in their respective handler. - """ - def __init__(self, key, directory, allowed_directories, selected_file=None, exclude=None): - """ - :param key: the key this instance should listen to - :param directory: working directory which the EventHandler should serve files from - :param allowed_directories: list of directories that can be switched to using selectdir - :param selected_file: file that is selected by default - :param exclude: list of filenames that should not appear between files (for .o, .pyc, etc.) - """ - super().__init__(key) - try: - self.filemanager = FileManager( - allowed_directories=allowed_directories, - working_directory=directory, - selected_file=selected_file, - exclude=exclude - ) - except IndexError: - raise EnvironmentError( - f'No file(s) in IdeEventHandler working_directory "{directory}"!' - ) - - MonitorManagerMixin.__init__( - self, - DirectoryMonitor, - self.key, - self.filemanager.allowed_directories - ) - - self.commands = { - 'read': self.read, - 'write': self.write, - 'select': self.select, - 'selectdir': self.select_dir, - 'exclude': self.exclude - } - - def read(self, data): - """ - Read the currently selected file. 
- - :return dict: TFW message data containing key 'content' - (contents of the selected file) - """ - try: - data['content'] = self.filemanager.file_contents - except PermissionError: - data['content'] = 'You have no permission to open that file :(' - except FileNotFoundError: - data['content'] = 'This file was removed :(' - except Exception: # pylint: disable=broad-except - data['content'] = 'Failed to read file :(' - return data - - def write(self, data): - """ - Overwrites a file with the desired string. - - :param data: TFW message data containing key 'content' - (new file content) - - """ - self.monitor.ignore = self.monitor.ignore + 1 - try: - self.filemanager.file_contents = data['content'] - except Exception: # pylint: disable=broad-except - LOG.exception('Error writing file!') - del data['content'] - return data - - def select(self, data): - """ - Selects a file from the current directory. - - :param data: TFW message data containing 'filename' - (name of file to select relative to the current directory) - """ - try: - self.filemanager.filename = data['filename'] - except EnvironmentError: - LOG.exception('Failed to select file "%s"', data['filename']) - return data - - def select_dir(self, data): - """ - Select a new working directory to display files from. - - :param data: TFW message data containing 'directory' - (absolute path of diretory to select. - must be a path whitelisted in - self.allowed_directories) - """ - try: - self.filemanager.workdir = data['directory'] - self.reload_monitor() - try: - self.filemanager.filename = self.filemanager.files[0] - self.read(data) - except IndexError: - data['content'] = 'No files in this directory :(' - except EnvironmentError as err: - LOG.error('Failed to select directory "%s". 
Reason: %s', data['directory'], str(err)) - return data - - def exclude(self, data): - """ - Overwrite list of excluded files - - :param data: TFW message data containing 'exclude' - (list of unix-style filename patterns to be excluded, - e.g.: ["\*.pyc", "\*.o") - """ - try: - self.filemanager.exclude = list(data['exclude']) - except TypeError: - LOG.error('Exclude must be Iterable!') - return data - - def attach_fileinfo(self, data): - """ - Basic information included in every response to the frontend. - """ - data['filename'] = self.filemanager.filename - data['files'] = self.filemanager.files - data['directory'] = self.filemanager.workdir - - def handle_event(self, message): - try: - data = message['data'] - message['data'] = self.commands[data['command']](data) - self.attach_fileinfo(data) - return message - except KeyError: - LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) - - def cleanup(self): - self.monitor.stop() diff --git a/lib/tfw/components/log_monitor.py b/lib/tfw/components/log_monitor.py deleted file mode 100644 index cf06a0a..0000000 --- a/lib/tfw/components/log_monitor.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -import logging -from os.path import dirname - -from watchdog.events import PatternMatchingEventHandler as PatternMatchingWatchdogEventHandler - -from tfw.networking.event_handlers.server_connector import ServerUplinkConnector -from tfw.decorators.rate_limiter import RateLimiter -from tfw.mixins.observer_mixin import ObserverMixin -from tfw.mixins.supervisor_mixin import SupervisorLogMixin - - -class LogMonitor(ObserverMixin): - def __init__(self, process_name, log_tail=0): - self.prevent_log_recursion() - event_handler = SendLogWatchdogEventHandler( - process_name, - log_tail=log_tail - ) - self.observer.schedule( - event_handler, - event_handler.path - ) - - @staticmethod - def prevent_log_recursion(): - # This is done to prevent inotify event logs triggering themselves (infinite log recursion) - logging.getLogger('watchdog.observers.inotify_buffer').propagate = False - - -class SendLogWatchdogEventHandler(PatternMatchingWatchdogEventHandler, SupervisorLogMixin): - def __init__(self, process_name, log_tail=0): - self.process_name = process_name - self.procinfo = self.supervisor.getProcessInfo(self.process_name) - super().__init__([ - self.procinfo['stdout_logfile'], - self.procinfo['stderr_logfile'] - ]) - self.uplink = ServerUplinkConnector() - self.log_tail = log_tail - - @property - def path(self): - return dirname(self.procinfo['stdout_logfile']) - - @RateLimiter(rate_per_second=5) - def on_modified(self, event): - self.uplink.send({ - 'key': 'processlog', - 'data': { - 'command': 'new_log', - 'stdout': self.read_stdout(self.process_name, tail=self.log_tail), - 'stderr': self.read_stderr(self.process_name, tail=self.log_tail) - } - }) diff --git a/lib/tfw/components/log_monitoring_event_handler.py b/lib/tfw/components/log_monitoring_event_handler.py deleted file mode 100644 index 0bc7ab2..0000000 --- a/lib/tfw/components/log_monitoring_event_handler.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. 
-# All Rights Reserved. See LICENSE file for details. - -from tfw.event_handler_base import EventHandlerBase -from tfw.mixins.monitor_manager_mixin import MonitorManagerMixin -from tfw.components.log_monitor import LogMonitor -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class LogMonitoringEventHandler(EventHandlerBase, MonitorManagerMixin): - """ - Monitors the output of a supervisor process (stdout, stderr) and - sends the results to the frontend. - - Accepts messages that have a data['command'] key specifying - a command to be executed. - - The API of each command is documented in their respective handler. - """ - def __init__(self, key, process_name, log_tail=0): - super().__init__(key) - self.process_name = process_name - self.log_tail = log_tail - MonitorManagerMixin.__init__( - self, - LogMonitor, - self.process_name, - self.log_tail - ) - - self.command_handlers = { - 'process_name': self.handle_process_name, - 'log_tail': self.handle_log_tail - } - - def handle_event(self, message): - try: - data = message['data'] - self.command_handlers[data['command']](data) - self.reload_monitor() - except KeyError: - LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) - - def handle_process_name(self, data): - """ - Changes the monitored process. - - :param data: TFW message data containing 'value' - (name of the process to monitor) - """ - self.set_monitor_args(data['value'], self.log_tail) - - def handle_log_tail(self, data): - """ - Sets tail length of the log the monitor will send - to the frontend (the monitor will send back the last - 'value' characters of the log). 
- - :param data: TFW message data containing 'value' - (new tail length) - """ - self.set_monitor_args(self.process_name, data['value']) - - def cleanup(self): - self.monitor.stop() diff --git a/lib/tfw/components/pipe_io_event_handler.py b/lib/tfw/components/pipe_io_event_handler.py deleted file mode 100644 index 0a94e57..0000000 --- a/lib/tfw/components/pipe_io_event_handler.py +++ /dev/null @@ -1,143 +0,0 @@ -from abc import abstractmethod -from json import loads, dumps -from subprocess import run, PIPE, Popen -from functools import partial -from os import getpgid, killpg -from os.path import join -from signal import SIGTERM -from secrets import token_urlsafe -from threading import Thread -from contextlib import suppress - -from tfw.event_handler_base import EventHandlerBase -from tfw.config.logs import logging - -from .pipe_io_server import PipeIOServer, terminate_process_on_failure - -LOG = logging.getLogger(__name__) -DEFAULT_PERMISSIONS = 0o600 - - -class PipeIOEventHandlerBase(EventHandlerBase): - def __init__(self, key, in_pipe_path, out_pipe_path, permissions=DEFAULT_PERMISSIONS): - super().__init__(key) - self.pipe_io = CallbackPipeIOServer( - in_pipe_path, - out_pipe_path, - self.handle_pipe_event, - permissions - ) - self.pipe_io.start() - - @abstractmethod - def handle_pipe_event(self, message_bytes): - raise NotImplementedError() - - def cleanup(self): - self.pipe_io.stop() - - -class CallbackPipeIOServer(PipeIOServer): - def __init__(self, in_pipe_path, out_pipe_path, callback, permissions): - super().__init__(in_pipe_path, out_pipe_path, permissions) - self.callback = callback - - def handle_message(self, message): - try: - self.callback(message) - except: # pylint: disable=bare-except - LOG.exception('Failed to handle message %s from pipe %s!', message, self.in_pipe) - - -class PipeIOEventHandler(PipeIOEventHandlerBase): - def handle_event(self, message): - json_bytes = dumps(message).encode() - self.pipe_io.send_message(json_bytes) - - def 
handle_pipe_event(self, message_bytes): - json = loads(message_bytes) - self.server_connector.send(json) - - -class TransformerPipeIOEventHandler(PipeIOEventHandlerBase): - # pylint: disable=too-many-arguments - def __init__( - self, key, in_pipe_path, out_pipe_path, - transform_in_cmd, transform_out_cmd, - permissions=DEFAULT_PERMISSIONS - ): - self._transform_in = partial(self._transform_message, transform_in_cmd) - self._transform_out = partial(self._transform_message, transform_out_cmd) - super().__init__(key, in_pipe_path, out_pipe_path, permissions) - - @staticmethod - def _transform_message(transform_cmd, message): - proc = run( - transform_cmd, - input=message, - stdout=PIPE, - stderr=PIPE, - shell=True - ) - if proc.returncode == 0: - return proc.stdout - raise ValueError(f'Transforming message {message} failed!') - - def handle_event(self, message): - json_bytes = dumps(message).encode() - transformed_bytes = self._transform_out(json_bytes) - if transformed_bytes: - self.pipe_io.send_message(transformed_bytes) - - def handle_pipe_event(self, message_bytes): - transformed_bytes = self._transform_in(message_bytes) - if transformed_bytes: - json_message = loads(transformed_bytes) - self.server_connector.send(json_message) - - -class CommandEventHandler(PipeIOEventHandler): - def __init__(self, key, command, permissions=DEFAULT_PERMISSIONS): - super().__init__( - key, - self._generate_tempfilename(), - self._generate_tempfilename(), - permissions - ) - - self._proc_stdin = open(self.pipe_io.out_pipe, 'rb') - self._proc_stdout = open(self.pipe_io.in_pipe, 'wb') - self._proc = Popen( - command, shell=True, executable='/bin/bash', - stdin=self._proc_stdin, stdout=self._proc_stdout, stderr=PIPE, - start_new_session=True - ) - self._monitor_proc_thread = self._start_monitor_proc() - - def _generate_tempfilename(self): - # pylint: disable=no-self-use - random_filename = partial(token_urlsafe, 10) - return join('/tmp', f'{type(self).__name__}.{random_filename()}') - 
- def _start_monitor_proc(self): - thread = Thread(target=self._monitor_proc, daemon=True) - thread.start() - return thread - - @terminate_process_on_failure - def _monitor_proc(self): - return_code = self._proc.wait() - if return_code == -int(SIGTERM): - # supervisord asked the program to terminate, this is fine - return - if return_code != 0: - _, stderr = self._proc.communicate() - raise RuntimeError(f'Subprocess failed ({return_code})! Stderr:\n{stderr.decode()}') - - def cleanup(self): - with suppress(ProcessLookupError): - process_group_id = getpgid(self._proc.pid) - killpg(process_group_id, SIGTERM) - self._proc_stdin.close() - self._proc_stdout.close() - super().cleanup() diff --git a/lib/tfw/components/pipe_io_server/__init__.py b/lib/tfw/components/pipe_io_server/__init__.py deleted file mode 100644 index cab334b..0000000 --- a/lib/tfw/components/pipe_io_server/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .pipe_io_server import PipeIOServer -from .terminate_process_on_failure import terminate_process_on_failure diff --git a/lib/tfw/components/pipe_io_server/deque.py b/lib/tfw/components/pipe_io_server/deque.py deleted file mode 100644 index b2f1ab4..0000000 --- a/lib/tfw/components/pipe_io_server/deque.py +++ /dev/null @@ -1,27 +0,0 @@ -from collections import deque -from threading import Lock, Condition - - -class Deque: - def __init__(self): - self._queue = deque() - - self._mutex = Lock() - self._not_empty = Condition(self._mutex) - - def pop(self): - with self._mutex: - while not self._queue: - self._not_empty.wait() - return self._queue.pop() - - def push(self, item): - self._push(item, self._queue.appendleft) - - def push_front(self, item): - self._push(item, self._queue.append) - - def _push(self, item, put_method): - with self._mutex: - put_method(item) - self._not_empty.notify() diff --git a/lib/tfw/components/pipe_io_server/pipe.py b/lib/tfw/components/pipe_io_server/pipe.py deleted file mode 100644 index eb021ae..0000000 --- 
a/lib/tfw/components/pipe_io_server/pipe.py +++ /dev/null @@ -1,16 +0,0 @@ -from os import mkfifo, remove, chmod -from os.path import exists - - -class Pipe: - def __init__(self, path): - self.path = path - - def recreate(self, permissions): - self.remove() - mkfifo(self.path) - chmod(self.path, permissions) # use chmod to ignore umask - - def remove(self): - if exists(self.path): - remove(self.path) diff --git a/lib/tfw/components/pipe_io_server/pipe_io_server.py b/lib/tfw/components/pipe_io_server/pipe_io_server.py deleted file mode 100644 index 2715f40..0000000 --- a/lib/tfw/components/pipe_io_server/pipe_io_server.py +++ /dev/null @@ -1,73 +0,0 @@ -from abc import ABC, abstractmethod -from threading import Thread, Event -from typing import Callable - -from .pipe_reader_thread import PipeReaderThread -from .pipe_writer_thread import PipeWriterThread -from .pipe import Pipe -from .terminate_process_on_failure import terminate_process_on_failure - - -class PipeIOServer(ABC, Thread): - def __init__(self, in_pipe=None, out_pipe=None, permissions=0o600): - super().__init__(daemon=True) - self._in_pipe, self._out_pipe = in_pipe, out_pipe - self._create_pipes(permissions) - self._stop_event = Event() - self._reader_thread, self._writer_thread = self._create_io_threads() - self._io_threads = (self._reader_thread, self._writer_thread) - self._on_stop = lambda: None - - def _create_pipes(self, permissions): - Pipe(self.in_pipe).recreate(permissions) - Pipe(self.out_pipe).recreate(permissions) - - @property - def in_pipe(self): - return self._in_pipe - - @property - def out_pipe(self): - return self._out_pipe - - def _create_io_threads(self): - reader_thread = PipeReaderThread(self.in_pipe, self._stop_event, self.handle_message) - writer_thread = PipeWriterThread(self.out_pipe, self._stop_event) - return reader_thread, writer_thread - - @abstractmethod - def handle_message(self, message): - raise NotImplementedError() - - def send_message(self, message): - 
self._writer_thread.write(message) - - @terminate_process_on_failure - def run(self): - for thread in self._io_threads: - thread.start() - self._stop_event.wait() - self._stop_threads() - - def stop(self): - self._stop_event.set() - if self.is_alive(): - self.join() - - def _stop_threads(self): - for thread in self._io_threads: - if thread.is_alive(): - thread.stop() - Pipe(self.in_pipe).remove() - Pipe(self.out_pipe).remove() - self._on_stop() - - def _set_on_stop(self, value): - if not isinstance(value, Callable): - raise ValueError("Supplied object is not callable!") - self._on_stop = value - - on_stop = property(fset=_set_on_stop) - - def wait(self): - self._stop_event.wait() diff --git a/lib/tfw/components/pipe_io_server/pipe_reader_thread.py b/lib/tfw/components/pipe_io_server/pipe_reader_thread.py deleted file mode 100644 index 4bce19d..0000000 --- a/lib/tfw/components/pipe_io_server/pipe_reader_thread.py +++ /dev/null @@ -1,44 +0,0 @@ -from contextlib import suppress -from os import open as osopen -from os import write, close, O_WRONLY, O_NONBLOCK -from threading import Thread - -from .terminate_process_on_failure import terminate_process_on_failure - - -class PipeReaderThread(Thread): - eof = b'' - stop_sequence = b'stop_reading\n' - - def __init__(self, pipe_path, stop_event, message_handler): - super().__init__(daemon=True) - self._message_handler = message_handler - self._pipe_path = pipe_path - self._stop_event = stop_event - - @terminate_process_on_failure - def run(self): - with self._open() as pipe: - while True: - message = pipe.readline() - if message == self.stop_sequence: - self._stop_event.set() - break - if message == self.eof: - self._open().close() - continue - self._message_handler(message[:-1]) - - def _open(self): - return open(self._pipe_path, 'rb') - - def stop(self): - while self.is_alive(): - self._unblock() - self.join() - - def _unblock(self): - with suppress(OSError): - fd = osopen(self._pipe_path, O_WRONLY | O_NONBLOCK) - 
write(fd, self.stop_sequence) - close(fd) diff --git a/lib/tfw/components/pipe_io_server/pipe_writer_thread.py b/lib/tfw/components/pipe_io_server/pipe_writer_thread.py deleted file mode 100644 index 3bc3fe7..0000000 --- a/lib/tfw/components/pipe_io_server/pipe_writer_thread.py +++ /dev/null @@ -1,50 +0,0 @@ -from contextlib import suppress -from os import O_NONBLOCK, O_RDONLY, close -from os import open as osopen -from threading import Thread - -from .terminate_process_on_failure import terminate_process_on_failure -from .deque import Deque - - -class PipeWriterThread(Thread): - def __init__(self, pipe_path, stop_event): - super().__init__(daemon=True) - self._pipe_path = pipe_path - self._stop_event = stop_event - self._write_queue = Deque() - - def write(self, message): - self._write_queue.push(message) - - @terminate_process_on_failure - def run(self): - with self._open() as pipe: - while True: - message = self._write_queue.pop() - if message is None: - self._stop_event.set() - break - try: - pipe.write(message + b'\n') - pipe.flush() - except BrokenPipeError: - try: # pipe was reopened, close() flushed the message - pipe.close() - except BrokenPipeError: # close() discarded the message - self._write_queue.push_front(message) - pipe = self._open() - - def _open(self): - return open(self._pipe_path, 'wb') - - def stop(self): - while self.is_alive(): - self._unblock() - self.join() - - def _unblock(self): - with suppress(OSError): - fd = osopen(self._pipe_path, O_RDONLY | O_NONBLOCK) - self._write_queue.push_front(None) - close(fd) diff --git a/lib/tfw/components/pipe_io_server/terminate_process_on_failure.py b/lib/tfw/components/pipe_io_server/terminate_process_on_failure.py deleted file mode 100644 index 7a0804c..0000000 --- a/lib/tfw/components/pipe_io_server/terminate_process_on_failure.py +++ /dev/null @@ -1,15 +0,0 @@ -from functools import wraps -from os import kill, getpid -from signal import SIGTERM -from traceback import print_exc - - -def 
terminate_process_on_failure(fun): - @wraps(fun) - def wrapper(*args, **kwargs): - try: - return fun(*args, **kwargs) - except: # pylint: disable=bare-except - print_exc() - kill(getpid(), SIGTERM) - return wrapper diff --git a/lib/tfw/components/process_managing_event_handler.py b/lib/tfw/components/process_managing_event_handler.py deleted file mode 100644 index 61b6c62..0000000 --- a/lib/tfw/components/process_managing_event_handler.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from xmlrpc.client import Fault as SupervisorFault - -from tfw.event_handler_base import EventHandlerBase -from tfw.mixins.supervisor_mixin import SupervisorMixin, SupervisorLogMixin -from tfw.components.directory_monitor import with_monitor_paused -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class ProcessManager(SupervisorMixin, SupervisorLogMixin): - def __init__(self): - self.commands = { - 'start': self.start_process, - 'stop': self.stop_process, - 'restart': self.restart_process - } - - def __call__(self, command, process_name): - return self.commands[command](process_name) - - -class ProcessManagingEventHandler(EventHandlerBase): - """ - Event handler that can manage processes managed by supervisor. - - This EventHandler accepts messages that have a data['command'] key specifying - a command to be executed. - Every message must contain a data['process_name'] field with the name of the - process to manage. 
This is the name specified in supervisor config files like so: - [program:someprogram] - - Commands available: start, stop, restart, readlog - (the names are as self-documenting as it gets) - """ - def __init__(self, key, dirmonitor=None, log_tail=0): - super().__init__(key) - self.monitor = dirmonitor - self.processmanager = ProcessManager() - self.log_tail = log_tail - - @with_monitor_paused - def handle_event(self, message): - try: - data = message['data'] - try: - self.processmanager(data['command'], data['process_name']) - except SupervisorFault as fault: - message['data']['error'] = fault.faultString - finally: - message['data']['stdout'] = self.processmanager.read_stdout( - data['process_name'], - self.log_tail - ) - message['data']['stderr'] = self.processmanager.read_stderr( - data['process_name'], - self.log_tail - ) - return message - except KeyError: - LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) diff --git a/lib/tfw/components/terminal_event_handler.py b/lib/tfw/components/terminal_event_handler.py deleted file mode 100644 index 7fbd74b..0000000 --- a/lib/tfw/components/terminal_event_handler.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from tfw.event_handler_base import EventHandlerBase -from tfw.components.terminado_mini_server import TerminadoMiniServer -from tfw.config import TFWENV -from tfw.config.logs import logging -from tao.config import TAOENV - -LOG = logging.getLogger(__name__) - - -class TerminalEventHandler(EventHandlerBase): - """ - Event handler responsible for managing terminal sessions for frontend xterm - sessions to connect to. You need to instanciate this in order for frontend - terminals to work. - - This EventHandler accepts messages that have a data['command'] key specifying - a command to be executed. - The API of each command is documented in their respective handler. 
- """ - def __init__(self, key, monitor): - """ - :param key: key this EventHandler listens to - :param monitor: tfw.components.HistoryMonitor instance to read command history from - """ - super().__init__(key) - self._historymonitor = monitor - bash_as_user_cmd = ['sudo', '-u', TAOENV.USER, 'bash'] - - self.terminado_server = TerminadoMiniServer( - '/terminal', - TFWENV.TERMINADO_PORT, - TFWENV.TERMINADO_WD, - bash_as_user_cmd - ) - - self.commands = { - 'write': self.write, - 'read': self.read - } - - if self._historymonitor: - self._historymonitor.watch() - self.terminado_server.listen() - - @property - def historymonitor(self): - return self._historymonitor - - def handle_event(self, message): - LOG.debug('TerminadoEventHandler received event: %s', message) - try: - data = message['data'] - message['data'] = self.commands[data['command']](data) - return message - except KeyError: - LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) - - def write(self, data): - """ - Writes a string to the terminal session (on the pty level). - Useful for pre-typing and executing commands for the user. - - :param data: TFW message data containing 'value' - (command to be written to the pty) - """ - self.terminado_server.pty.write(data['value']) - return data - - def read(self, data): - """ - Reads the history of commands executed. - - :param data: TFW message data containing 'count' - (the number of history elements to return) - :return dict: message with list of commands in data['history'] - """ - data['count'] = int(data.get('count', 1)) - if self.historymonitor: - data['history'] = self.historymonitor.history[-data['count']:] - return data - - def cleanup(self): - if self.historymonitor: - self.historymonitor.stop() diff --git a/lib/tfw/config/__init__.py b/lib/tfw/config/__init__.py deleted file mode 100644 index 3614ad9..0000000 --- a/lib/tfw/config/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. 
-# All Rights Reserved. See LICENSE file for details. - -from .envvars import TFWENV diff --git a/lib/tfw/config/envvars.py b/lib/tfw/config/envvars.py deleted file mode 100644 index ef590c6..0000000 --- a/lib/tfw/config/envvars.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from envvars import LazyEnvironment - -TFWENV = LazyEnvironment('TFW_', 'tfwenvtuple').environment diff --git a/lib/tfw/config/logs.py b/lib/tfw/config/logs.py deleted file mode 100644 index 15992b2..0000000 --- a/lib/tfw/config/logs.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -import logging - -logging.basicConfig(level=logging.DEBUG) diff --git a/lib/tfw/decorators/__init__.py b/lib/tfw/decorators/__init__.py deleted file mode 100644 index db64b25..0000000 --- a/lib/tfw/decorators/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. diff --git a/lib/tfw/decorators/rate_limiter.py b/lib/tfw/decorators/rate_limiter.py deleted file mode 100644 index 6f666c8..0000000 --- a/lib/tfw/decorators/rate_limiter.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from functools import wraps, partial -from time import time, sleep - -from tfw.decorators.lazy_property import lazy_property - - -class RateLimiter: - """ - Decorator class for rate limiting, blocking. - - When applied to a function this decorator will apply rate limiting - if the function is invoked more frequently than rate_per_seconds. - - By default rate limiting means sleeping until the next invocation time - as per __init__ parameter rate_per_seconds. 
- - Note that this decorator BLOCKS THE THREAD it is being executed on, - so it is only acceptable for stuff running on a separate thread. - - If this is no good for you please refer to AsyncRateLimiter in this module, - which is designed not to block and use the IOLoop it is being called from. - """ - def __init__(self, rate_per_second): - """ - :param rate_per_second: max frequency the decorated method should be - invoked with - """ - self.min_interval = 1 / float(rate_per_second) - self.fun = None - self.last_call = time() - - def action(self, seconds_to_next_call): - if seconds_to_next_call: - sleep(seconds_to_next_call) - self.fun() - - def __call__(self, fun): - @wraps(fun) - def wrapper(*args, **kwargs): - self.fun = partial(fun, *args, **kwargs) - limit_seconds = self._limit_rate() - self.action(limit_seconds) - return wrapper - - def _limit_rate(self): - seconds_since_last_call = time() - self.last_call - seconds_to_next_call = self.min_interval - seconds_since_last_call - - if seconds_to_next_call > 0: - return seconds_to_next_call - self.last_call = time() - return 0 - - -class AsyncRateLimiter(RateLimiter): - """ - Decorator class for rate limiting, non-blocking. - - The semantics of the rate limiting: - - unlike RateLimiter this decorator never blocks, instead it adds an async - callback version of the decorated function to the IOLoop - (to be executed after the rate limiting has expired). 
- - the timing works similarly to RateLimiter - """ - def __init__(self, rate_per_second, ioloop_factory): - """ - :param rate_per_second: max frequency the decorated method should be - invoked with - :param ioloop_factory: callable that should return an instance of the - IOLoop of the application - """ - self._ioloop_factory = ioloop_factory - self._ioloop = None - self._last_callback = None - - self._make_action_thread_safe() - super().__init__(rate_per_second=rate_per_second) - - def _make_action_thread_safe(self): - self.action = partial(self.ioloop.add_callback, self.action) - - @lazy_property - def ioloop(self): - return self._ioloop_factory() - - def action(self, seconds_to_next_call): - # pylint: disable=method-hidden - if self._last_callback: - self.ioloop.remove_timeout(self._last_callback) - - self._last_callback = self.ioloop.call_later( - seconds_to_next_call, - self.fun_with_debounce - ) - - def fun_with_debounce(self): - self.last_call = time() - self.fun() diff --git a/lib/tfw/event_handler_base/__init__.py b/lib/tfw/event_handler_base/__init__.py deleted file mode 100644 index fd50525..0000000 --- a/lib/tfw/event_handler_base/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from .event_handler_base import EventHandlerBase -from .boradcasting_event_handler import BroadcastingEventHandler -from .fsm_aware_event_handler import FSMAwareEventHandler diff --git a/lib/tfw/event_handler_base/boradcasting_event_handler.py b/lib/tfw/event_handler_base/boradcasting_event_handler.py deleted file mode 100644 index 59ee493..0000000 --- a/lib/tfw/event_handler_base/boradcasting_event_handler.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -from abc import ABC - -from tfw.event_handler_base.event_handler_base import EventHandlerBase -from tfw.crypto import message_checksum - - -class BroadcastingEventHandler(EventHandlerBase, ABC): - # pylint: disable=abstract-method - """ - Abstract base class for EventHandlers which broadcast responses - and intelligently ignore their own broadcasted messages they receive. - """ - def __init__(self, key): - super().__init__(key) - self.own_message_hashes = [] - - def event_handler_callback(self, message): - message_hash = message_checksum(message) - - if message_hash in self.own_message_hashes: - self.own_message_hashes.remove(message_hash) - return - - response = self.dispatch_handling(message) - if response: - self.own_message_hashes.append(message_checksum(response)) - self.server_connector.broadcast(response) diff --git a/lib/tfw/event_handler_base/event_handler_base.py b/lib/tfw/event_handler_base/event_handler_base.py deleted file mode 100644 index c013a8d..0000000 --- a/lib/tfw/event_handler_base/event_handler_base.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from abc import ABC, abstractmethod -from inspect import currentframe -from typing import Iterable - -from tfw.networking.event_handlers.server_connector import ServerConnector -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class EventHandlerBase(ABC): - """ - Abstract base class for all Python based EventHandlers. Useful implementation template - for other languages. 
- - Derived classes must implement the handle_event() method - """ - def __init__(self, key): - self.server_connector = ServerConnector() - self.keys = [] - if isinstance(key, str): - self.keys.append(key) - elif isinstance(key, Iterable): - self.keys = list(key) - - self.subscribe(*self.keys) - self.server_connector.register_callback(self.event_handler_callback) - - @property - def key(self): - """ - Returns the oldest key this EventHandler was subscribed to. - """ - return self.keys[0] - - def event_handler_callback(self, message): - """ - Callback that is invoked when receiving a message. - Dispatches messages to handler methods and sends - a response back in case the handler returned something. - This is subscribed in __init__(). - """ - if not self.check_key(message): - return - - response = self.dispatch_handling(message) - if response: - self.server_connector.send(response) - - def check_key(self, message): - """ - Checks whether the message is intended for this - EventHandler. - - This is necessary because ZMQ handles PUB - SUB - connetions with pattern matching (e.g. someone - subscribed to 'fsm' will receive 'fsm_update' - messages as well. - """ - if '' in self.keys: - return True - return message['key'] in self.keys - - def dispatch_handling(self, message): - """ - Used to dispatch messages to their specific handlers. - - :param message: the message received - :returns: the message to send back - """ - return self.handle_event(message) - - @abstractmethod - def handle_event(self, message): - """ - Abstract method that implements the handling of messages. - - :param message: the message received - :returns: the message to send back - """ - raise NotImplementedError - - def subscribe(self, *keys): - """ - Subscribe this EventHandler to receive events for given keys. - Note that you can subscribe to the same key several times in which - case you will need to unsubscribe multiple times in order to stop - receiving events. 
- - :param keys: list of keys to subscribe to - """ - for key in keys: - self.server_connector.subscribe(key) - self.keys.append(key) - - def unsubscribe(self, *keys): - """ - Unsubscribe this eventhandler from the given keys. - - :param keys: list of keys to unsubscribe from - """ - for key in keys: - self.server_connector.unsubscribe(key) - self.keys.remove(key) - - def cleanup(self): - """ - Perform cleanup actions such as releasing database - connections and stuff like that. - """ - pass - - @classmethod - def get_local_instances(cls): - frame = currentframe() - if frame is None: - raise EnvironmentError('inspect.currentframe() is not supported!') - - locals_values = frame.f_back.f_locals.values() - return { - instance for instance in locals_values - if isinstance(instance, cls) - } diff --git a/lib/tfw/event_handler_base/fsm_aware_event_handler.py b/lib/tfw/event_handler_base/fsm_aware_event_handler.py deleted file mode 100644 index 01d6360..0000000 --- a/lib/tfw/event_handler_base/fsm_aware_event_handler.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from abc import ABC - -from tfw.event_handler_base.event_handler_base import EventHandlerBase -from tfw.networking.fsm_aware import FSMAware - - -class FSMAwareEventHandler(EventHandlerBase, FSMAware, ABC): - # pylint: disable=abstract-method - """ - Abstract base class for EventHandlers which automatically - keep track of the state of the TFW FSM. 
- """ - def __init__(self, key): - EventHandlerBase.__init__(self, key) - FSMAware.__init__(self) - self.subscribe('fsm_update') - - def dispatch_handling(self, message): - if self.update_fsm_data(message): - return None - return super().dispatch_handling(message) diff --git a/lib/tfw/fsm/__init__.py b/lib/tfw/fsm/__init__.py deleted file mode 100644 index 11a7a58..0000000 --- a/lib/tfw/fsm/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from .fsm_base import FSMBase -from .linear_fsm import LinearFSM -from .yaml_fsm import YamlFSM diff --git a/lib/tfw/mixins/__init__.py b/lib/tfw/mixins/__init__.py deleted file mode 100644 index db64b25..0000000 --- a/lib/tfw/mixins/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. diff --git a/lib/tfw/mixins/monitor_manager_mixin.py b/lib/tfw/mixins/monitor_manager_mixin.py deleted file mode 100644 index f247659..0000000 --- a/lib/tfw/mixins/monitor_manager_mixin.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class MonitorManagerMixin: - def __init__(self, monitor_type, *monitor_args): - self._monitor_type = monitor_type - self._monitor = None - self.monitor_args = monitor_args - self.reload_monitor() - - @property - def monitor(self): - return self._monitor - - def set_monitor_args(self, *monitor_args): - self.monitor_args = monitor_args - - def reload_monitor(self): - if self._monitor: - try: - self._monitor.stop() - except KeyError: - LOG.debug('Working directory was removed – ignoring...') - self._monitor = self._monitor_type(*self.monitor_args) - self._monitor.watch() # This runs on a separate thread diff --git a/lib/tfw/mixins/observer_mixin.py b/lib/tfw/mixins/observer_mixin.py deleted file mode 100644 index d5415e5..0000000 --- a/lib/tfw/mixins/observer_mixin.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from watchdog.observers import Observer - -from tfw.decorators.lazy_property import lazy_property - - -class ObserverMixin: - @lazy_property - def observer(self): - # pylint: disable=no-self-use - return Observer() - - def watch(self): - self.observer.start() - - def stop(self): - self.observer.stop() - self.observer.join() diff --git a/lib/tfw/networking/__init__.py b/lib/tfw/networking/__init__.py deleted file mode 100644 index 500dd7a..0000000 --- a/lib/tfw/networking/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -from .message_sender import MessageSender -from .event_handlers.server_connector import ServerUplinkConnector as TFWServerConnector -from .server.tfw_server import TFWServer diff --git a/lib/tfw/networking/event_handlers/__init__.py b/lib/tfw/networking/event_handlers/__init__.py deleted file mode 100644 index db64b25..0000000 --- a/lib/tfw/networking/event_handlers/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. diff --git a/lib/tfw/networking/event_handlers/server_connector.py b/lib/tfw/networking/event_handlers/server_connector.py deleted file mode 100644 index 7d24803..0000000 --- a/lib/tfw/networking/event_handlers/server_connector.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from functools import partial - -import zmq -from zmq.eventloop.zmqstream import ZMQStream - -from tfw.networking.zmq_connector_base import ZMQConnectorBase -from tfw.networking.serialization import serialize_tfw_msg, with_deserialize_tfw_msg -from tfw.config import TFWENV -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class ServerDownlinkConnector(ZMQConnectorBase): - def __init__(self, zmq_context=None): - super(ServerDownlinkConnector, self).__init__(zmq_context) - self._zmq_sub_socket = self._zmq_context.socket(zmq.SUB) - self._zmq_sub_socket.connect(f'tcp://localhost:{TFWENV.PUBLISHER_PORT}') - self._zmq_sub_stream = ZMQStream(self._zmq_sub_socket) - - self.subscribe = partial(self._zmq_sub_socket.setsockopt_string, zmq.SUBSCRIBE) - self.unsubscribe = partial(self._zmq_sub_socket.setsockopt_string, zmq.UNSUBSCRIBE) - - def register_callback(self, callback): - callback = with_deserialize_tfw_msg(callback) - self._zmq_sub_stream.on_recv(callback) - - -class ServerUplinkConnector(ZMQConnectorBase): - """ - Class capable of sending messages to the 
TFW server and event handlers. - """ - def __init__(self, zmq_context=None): - super(ServerUplinkConnector, self).__init__(zmq_context) - self._zmq_push_socket = self._zmq_context.socket(zmq.PUSH) - self._zmq_push_socket.connect(f'tcp://localhost:{TFWENV.RECEIVER_PORT}') - - def send_to_eventhandler(self, message): - """ - Send a message to an event handler through the TFW server. - - This envelopes the desired message in the 'data' field of the message to - TFWServer, which will mirror it to event handlers. - - :param message: JSON message you want to send - """ - self.send({ - 'key': 'mirror', - 'data': message - }) - - def send(self, message): - """ - Send a message to the frontend through the TFW server. - - :param message: JSON message you want to send - """ - self._zmq_push_socket.send_multipart(serialize_tfw_msg(message)) - - def broadcast(self, message): - """ - Broadast a message through the TFW server. - - This envelopes the desired message in the 'data' field of the message to - TFWServer, which will broadast it. - - :param message: JSON message you want to send - """ - self.send({ - 'key': 'broadcast', - 'data': message - }) - - -class ServerConnector(ServerUplinkConnector, ServerDownlinkConnector): - pass diff --git a/lib/tfw/networking/fsm_aware.py b/lib/tfw/networking/fsm_aware.py deleted file mode 100644 index cb2b287..0000000 --- a/lib/tfw/networking/fsm_aware.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from tfw.crypto import KeyManager, verify_message - -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class FSMAware: - """ - Base class for stuff that has to be aware of the framework FSM. - This is done by processing 'fsm_update' messages. 
- """ - def __init__(self): - self.fsm_state = None - self.fsm_in_accepted_state = False - self.fsm_event_log = [] - self._auth_key = KeyManager().auth_key - - def update_fsm_data(self, message): - if message['key'] == 'fsm_update' and verify_message(self._auth_key, message): - self._handle_fsm_update(message) - return True - return False - - def _handle_fsm_update(self, message): - try: - update_data = message['data'] - new_state = update_data['current_state'] - if self.fsm_state != new_state: - self.handle_fsm_step(**update_data) - self.fsm_state = new_state - self.fsm_in_accepted_state = update_data['in_accepted_state'] - self.fsm_event_log.append(update_data) - except KeyError: - LOG.error('Invalid fsm_update message received!') - - def handle_fsm_step(self, **kwargs): - """ - Called in case the TFW FSM has stepped. - - :param kwargs: fsm_update 'data' field - """ - pass diff --git a/lib/tfw/networking/message_sender.py b/lib/tfw/networking/message_sender.py deleted file mode 100644 index 58eb1c9..0000000 --- a/lib/tfw/networking/message_sender.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - -from tfw.networking.event_handlers.server_connector import ServerUplinkConnector - - -class MessageSender: - """ - Provides mechanisms to send messages to our frontend messaging component. - """ - def __init__(self): - self.server_connector = ServerUplinkConnector() - self.key = 'message' - self.queue_key = 'queueMessages' - - def send(self, originator, message): - """ - Sends a message. - :param originator: name of sender to be displayed on the frontend - :param message: message to send - """ - data = { - 'originator': originator, - 'message': message - } - self.server_connector.send({ - 'key': self.key, - 'data': data - }) - - def queue_messages(self, originator, messages): - """ - Queues a list of messages to be displayed in a chatbot-like manner. 
- :param originator: name of sender to be displayed on the frontend - :param messages: list of messages to queue - """ - data = { - 'messages': [ - {'message': message, 'originator': originator} - for message in messages - ] - } - self.server_connector.send({ - 'key': self.queue_key, - 'data': data - }) - - @staticmethod - def generate_messages_from_queue(queue_message): - for message in queue_message['data']['messages']: - yield { - 'key': 'message', - 'data': message - } diff --git a/lib/tfw/networking/server/__init__.py b/lib/tfw/networking/server/__init__.py deleted file mode 100644 index db64b25..0000000 --- a/lib/tfw/networking/server/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. diff --git a/lib/tfw/networking/server/event_handler_connector.py b/lib/tfw/networking/server/event_handler_connector.py deleted file mode 100644 index 5075c56..0000000 --- a/lib/tfw/networking/server/event_handler_connector.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -import zmq -from zmq.eventloop.zmqstream import ZMQStream - -from tfw.networking.zmq_connector_base import ZMQConnectorBase -from tfw.networking.serialization import serialize_tfw_msg, with_deserialize_tfw_msg -from tfw.config import TFWENV -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class EventHandlerDownlinkConnector(ZMQConnectorBase): - def __init__(self, zmq_context=None): - super(EventHandlerDownlinkConnector, self).__init__(zmq_context) - self._zmq_pull_socket = self._zmq_context.socket(zmq.PULL) - self._zmq_pull_stream = ZMQStream(self._zmq_pull_socket) - address = f'tcp://*:{TFWENV.RECEIVER_PORT}' - self._zmq_pull_socket.bind(address) - LOG.debug('Pull socket bound to %s', address) - - -class EventHandlerUplinkConnector(ZMQConnectorBase): - def __init__(self, zmq_context=None): - super(EventHandlerUplinkConnector, self).__init__(zmq_context) - self._zmq_pub_socket = self._zmq_context.socket(zmq.PUB) - address = f'tcp://*:{TFWENV.PUBLISHER_PORT}' - self._zmq_pub_socket.bind(address) - LOG.debug('Pub socket bound to %s', address) - - -class EventHandlerConnector(EventHandlerDownlinkConnector, EventHandlerUplinkConnector): - def register_callback(self, callback): - callback = with_deserialize_tfw_msg(callback) - self._zmq_pull_stream.on_recv(callback) - - def send_message(self, message: dict): - self._zmq_pub_socket.send_multipart(serialize_tfw_msg(message)) diff --git a/lib/tfw/networking/server/tfw_server.py b/lib/tfw/networking/server/tfw_server.py deleted file mode 100644 index a1a0f5f..0000000 --- a/lib/tfw/networking/server/tfw_server.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -from abc import ABC, abstractmethod -from contextlib import suppress - -from tornado.web import Application - -from tfw.networking.server.zmq_websocket_proxy import ZMQWebSocketProxy -from tfw.networking.event_handlers.server_connector import ServerUplinkConnector -from tfw.networking.server.event_handler_connector import EventHandlerConnector -from tfw.networking.message_sender import MessageSender -from tfw.networking.fsm_aware import FSMAware -from tfw.crypto import KeyManager, verify_message, sign_message -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class TFWServer(FSMAware): - """ - This class handles the proxying of messages between the frontend and event handers. - It proxies messages from the "/ws" route to all event handlers subscribed to a ZMQ - SUB socket. - """ - def __init__(self): - super().__init__() - self._event_handler_connector = EventHandlerConnector() - self._uplink_connector = ServerUplinkConnector() - self._auth_key = KeyManager().auth_key - - self.application = Application([( - r'/ws', ZMQWebSocketProxy, { - 'event_handler_connector': self._event_handler_connector, - 'proxy_filters_and_callbacks': { - 'message_handlers': [ - self.handle_trigger, - self.handle_recover, - self.handle_fsm_update - ], - 'frontend_message_handlers': [self.save_frontend_messages] - } - } - )]) - - self._frontend_messages = FrontendMessageStorage() - - def handle_trigger(self, message): - if 'trigger' in message: - LOG.debug('Executing handler for trigger "%s"', message.get('trigger', '')) - fsm_eh_command = { - 'key': 'fsm', - 'data': { - 'command': 'trigger', - 'value': message['trigger'] - } - } - if verify_message(self._auth_key, message): - sign_message(self._auth_key, fsm_eh_command) - self._uplink_connector.send_to_eventhandler(fsm_eh_command) - - def handle_recover(self, message): - if message['key'] == 'recover': - self._frontend_messages.replay_messages(self._uplink_connector) - self._frontend_messages.clear() - - def 
handle_fsm_update(self, message): - self.update_fsm_data(message) - - def save_frontend_messages(self, message): - self._frontend_messages.save_message(message) - - def listen(self, port): - self.application.listen(port) - - -class MessageStorage(ABC): - def __init__(self): - self.saved_messages = [] - - def save_message(self, message): - with suppress(KeyError, AttributeError): - if self.filter_message(message): - self.saved_messages.extend(self.transform_message(message)) - - @abstractmethod - def filter_message(self, message): - raise NotImplementedError - - def transform_message(self, message): # pylint: disable=no-self-use - yield message - - def clear(self): - self.saved_messages.clear() - - -class FrontendMessageStorage(MessageStorage): - def filter_message(self, message): - key = message['key'] - command = message.get('data', {}).get('command') - return ( - key in ('message', 'dashboard', 'queueMessages') - or key == 'ide' and command in ('select', 'read') - ) - - def transform_message(self, message): - if message['key'] == 'queueMessages': - yield from MessageSender.generate_messages_from_queue(message) - else: - yield message - - def replay_messages(self, connector): - for message in self.saved_messages: - connector.send(message) diff --git a/lib/tfw/networking/server/zmq_websocket_proxy.py b/lib/tfw/networking/server/zmq_websocket_proxy.py deleted file mode 100644 index 15826bd..0000000 --- a/lib/tfw/networking/server/zmq_websocket_proxy.py +++ /dev/null @@ -1,155 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -import json - -from tornado.websocket import WebSocketHandler - -from tfw.mixins.callback_mixin import CallbackMixin -from tfw.config.logs import logging - -LOG = logging.getLogger(__name__) - - -class ZMQWebSocketProxy(WebSocketHandler): - # pylint: disable=abstract-method - instances = set() - sequence_number = 0 - - def initialize(self, **kwargs): # pylint: disable=arguments-differ - self._event_handler_connector = kwargs['event_handler_connector'] - self._proxy_filters_and_callbacks = kwargs.get('proxy_filters_and_callbacks', {}) - - self.proxy_eventhandler_to_websocket = TFWProxy( - self.send_eventhandler_message, - self.send_websocket_message - ) - self.proxy_websocket_to_eventhandler = TFWProxy( - self.send_websocket_message, - self.send_eventhandler_message - ) - - self.subscribe_proxy_callbacks() - - def subscribe_proxy_callbacks(self): - eventhandler_message_handlers = self._proxy_filters_and_callbacks.get('eventhandler_message_handlers', []) - frontend_message_handlers = self._proxy_filters_and_callbacks.get('frontend_message_handlers', []) - message_handlers = self._proxy_filters_and_callbacks.get('message_handlers', []) - proxy_filters = self._proxy_filters_and_callbacks.get('proxy_filters', []) - - self.proxy_websocket_to_eventhandler.subscribe_proxy_callbacks_and_filters( - eventhandler_message_handlers + message_handlers, - proxy_filters - ) - - self.proxy_eventhandler_to_websocket.subscribe_proxy_callbacks_and_filters( - frontend_message_handlers + message_handlers, - proxy_filters - ) - - def prepare(self): - ZMQWebSocketProxy.instances.add(self) - - def on_close(self): - ZMQWebSocketProxy.instances.remove(self) - - def open(self, *args, **kwargs): - LOG.debug('WebSocket connection initiated') - self._event_handler_connector.register_callback(self.eventhander_callback) - - def eventhander_callback(self, message): - """ - Invoked on ZMQ messages from event handlers. 
- """ - self.sequence_message(message) - LOG.debug('Received on pull socket: %s', message) - self.proxy_eventhandler_to_websocket(message) - - @classmethod - def sequence_message(cls, message): - cls.sequence_number += 1 - message['seq'] = cls.sequence_number - - def on_message(self, message): - """ - Invoked on WS messages from frontend. - """ - message = json.loads(message) - self.sequence_message(message) - LOG.debug('Received on WebSocket: %s', message) - self.proxy_websocket_to_eventhandler(message) - - def send_eventhandler_message(self, message): - self._event_handler_connector.send_message(message) - - @staticmethod - def send_websocket_message(message): - for instance in ZMQWebSocketProxy.instances: - instance.write_message(message) - - # much secure, very cors, wow - def check_origin(self, origin): - return True - - -class TFWProxy: - # pylint: disable=protected-access - def __init__(self, to_source, to_destination): - self.to_source = to_source - self.to_destination = to_destination - - self.proxy_filters = CallbackMixin() - self.proxy_callbacks = CallbackMixin() - - self.proxy_filters.subscribe_callback(self.validate_message) - - self.keyhandlers = { - 'mirror': self.mirror, - 'broadcast': self.broadcast - } - - @staticmethod - def validate_message(message): - if 'key' not in message: - raise ValueError('Invalid TFW message format!') - - def __call__(self, message): - if not self.filter_and_execute_callbacks(message): - return - - if message['key'] not in self.keyhandlers: - self.to_destination(message) - else: - handler = self.keyhandlers[message['key']] - try: - handler(message) - except KeyError: - LOG.error('Invalid "%s" message format! 
Ignoring.', handler.__name__) - - def filter_and_execute_callbacks(self, message): - try: - self.proxy_filters._execute_callbacks(message) - self.proxy_callbacks._execute_callbacks(message) - return True - except ValueError: - LOG.exception('Invalid TFW message received!') - return False - - def mirror(self, message): - message = message['data'] - if not self.filter_and_execute_callbacks(message): - return - LOG.debug('Mirroring message: %s', message) - self.to_source(message) - - def broadcast(self, message): - message = message['data'] - if not self.filter_and_execute_callbacks(message): - return - LOG.debug('Broadcasting message: %s', message) - self.to_source(message) - self.to_destination(message) - - def subscribe_proxy_callbacks_and_filters(self, proxy_callbacks, proxy_filters): - self.proxy_callbacks.subscribe_callbacks(*proxy_callbacks) - self.proxy_filters.subscribe_callbacks(*proxy_filters) diff --git a/lib/tfw/networking/zmq_connector_base.py b/lib/tfw/networking/zmq_connector_base.py deleted file mode 100644 index 47a850e..0000000 --- a/lib/tfw/networking/zmq_connector_base.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -import zmq - - -class ZMQConnectorBase: - def __init__(self, zmq_context=None): - self._zmq_context = zmq_context or zmq.Context.instance() diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..70c3cf1 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +filterwarnings = + ignore::DeprecationWarning:zmq + ignore::DeprecationWarning:watchdog diff --git a/requirements.txt b/requirements.txt index c2ab3ed..be5fcd3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,15 @@ -tornado==5.1 -pyzmq==17.1.2 -transitions==0.6.6 -terminado==0.8.1 -watchdog==0.8.3 -PyYAML==3.13 -Jinja2==2.10 -cryptography==2.3.1 -python-dateutil==2.7.3 +tornado>=6.0.0,<7.0.0 +pyzmq>=17.0.0,<18.0.0 +transitions>=0.0.0,<1.0.0 +terminado>=0.0.0,<1.0.0 +watchdog>=0.0.0,<1.0.0 +PyYAML>=5.0.0,<6.0.0 +Jinja2>=2.0.0,<3.0.0 +cryptography>=2.0.0,<3.0.0 +python-dateutil>=2.0.0,<3.0.0 +SQLAlchemy>=1.0.0,<2.0.0 +python-dateutil>=2.0.0,<3.0.0 +pytest>=5.0.0,<6.0.0 +pylint>=2.0.0,<3.0.0 +rope>=0.0.0,<1.0.0 +pipe-io-server>=1.0.0,<2.0.0 diff --git a/setup.py b/setup.py index e5fa212..c4c7912 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from os.path import dirname, realpath, join -from setuptools import setup, find_packages +from setuptools import setup here = dirname(realpath(__file__)) @@ -10,20 +10,20 @@ with open(join(here, 'requirements.txt'), 'r') as ifile: requirements = ifile.read().splitlines() setup( - name = 'tfw', - version = version, - description = 'Avatao tutorial-framework', - url = 'https://github.com/avatao-content/baseimage-tutorial-framework', - author = 'Avatao.com Innovative Learning Kft.', - author_email = 'support@avatao.com', - license = 'custom', - packages = find_packages('lib'), - package_dir = {'': 'lib'}, - install_requires = requirements, - extras_require = { + name='tfw', + version=version, + description='Avatao tutorial-framework', + url='https://github.com/avatao-content/baseimage-tutorial-framework', + author='Avatao.com Innovative 
Learning Kft.', + author_email='support@avatao.com', + license='custom', + packages=['tfw'], + package_dir={'tfw': 'tfw'}, + install_requires=requirements, + extras_require={ 'docs': [ 'sphinx >= 1.7.0', ], }, - zip_safe = False, + zip_safe=False, ) diff --git a/supervisor/components/tfw_init.conf b/supervisor/components/tfw_init.conf new file mode 100644 index 0000000..51f65b5 --- /dev/null +++ b/supervisor/components/tfw_init.conf @@ -0,0 +1,6 @@ +[program:tfw_init] +user=root +directory=/tmp +command=bash tfw_init.sh +autorestart=false +startsecs=0 diff --git a/supervisor/components/tfw_server.conf b/supervisor/components/tfw_server.conf index f583be9..de72b69 100644 --- a/supervisor/components/tfw_server.conf +++ b/supervisor/components/tfw_server.conf @@ -1,4 +1,4 @@ -[program:tfwserver] +[program:tfw_server] user=root directory=%(ENV_TFW_SERVER_DIR)s -command=python3 tfw_server.py +command=python3 -u tfw_server.py diff --git a/supervisor/tfw_init.sh b/supervisor/tfw_init.sh new file mode 100644 index 0000000..f2ecd72 --- /dev/null +++ b/supervisor/tfw_init.sh @@ -0,0 +1,15 @@ +#!/bin/bash +set -euo pipefail + +echo "export HISTFILE=\"${TFW_HISTFILE}\"" >> /tmp/bashrc && + cat /tmp/bashrc >> "/home/${AVATAO_USER}/.bashrc" + +if [[ -z "${HOTRELOAD-}" ]]; then + for dir in "${TFW_LIB_DIR}/tfw" "/etc/nginx" "/etc/supervisor"; do + chown -R root:root "${dir}" && chmod -R 700 "${dir}"; + done +fi + +chmod 777 "${TFW_PIPES_DIR}" + +rm -f bashrc requirements.txt tfw_init.sh diff --git a/supervisor/tfw_server.py b/supervisor/tfw_server.py index 78766ac..d3bbb5e 100644 --- a/supervisor/tfw_server.py +++ b/supervisor/tfw_server.py @@ -1,9 +1,11 @@ from tornado.ioloop import IOLoop -from tfw.networking import TFWServer -from tfw.config import TFWENV +from tfw.main import TFWServer, setup_logger, setup_signal_handlers if __name__ == '__main__': - TFWServer().listen(TFWENV.WEB_PORT) + setup_logger(__file__) + TFWServer().listen() + + setup_signal_handlers() 
IOLoop.instance().start() diff --git a/tfw/__init__.py b/tfw/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tfw/components/__init__.py b/tfw/components/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tfw/components/frontend/__init__.py b/tfw/components/frontend/__init__.py new file mode 100644 index 0000000..c952158 --- /dev/null +++ b/tfw/components/frontend/__init__.py @@ -0,0 +1,6 @@ +from .console_logs_handler import ConsoleLogsHandler +from .frontend_proxy_handler import FrontendProxyHandler +from .frontend_ready_handler import FrontendReadyHandler +from .message_queue_handler import MessageQueueHandler +from .message_sender import MessageSender +from .frontend_config_handler import FrontendConfigHandler diff --git a/tfw/components/frontend/console_logs_handler.py b/tfw/components/frontend/console_logs_handler.py new file mode 100644 index 0000000..eb02aa4 --- /dev/null +++ b/tfw/components/frontend/console_logs_handler.py @@ -0,0 +1,19 @@ +import logging + +LOG = logging.getLogger(__name__) + + +class ConsoleLogsHandler: + keys = ['process.log.new'] + + def __init__(self, *, stream): + self.stream = stream + + def handle_event(self, message, connector): + try: + connector.send_message({ + 'key': 'console.write', + 'content': message[self.stream] + }) + except KeyError: + LOG.error('Invalid %s message received: %s', self.keys, message) diff --git a/tfw/components/frontend/frontend_config_handler/__init__.py b/tfw/components/frontend/frontend_config_handler/__init__.py new file mode 100644 index 0000000..86c5b0a --- /dev/null +++ b/tfw/components/frontend/frontend_config_handler/__init__.py @@ -0,0 +1 @@ +from .frontend_config_handler import FrontendConfigHandler diff --git a/tfw/components/frontend/frontend_config_handler/frontend_config_handler.py b/tfw/components/frontend/frontend_config_handler/frontend_config_handler.py new file mode 100644 index 0000000..4c71097 --- /dev/null +++ 
b/tfw/components/frontend/frontend_config_handler/frontend_config_handler.py @@ -0,0 +1,36 @@ +from yaml import safe_load + +from tfw.internals.networking import Scope + + +class FrontendConfigHandler: + keys = ['frontend.ready'] + + def __init__(self, config_path): + self._config_path = config_path + + def handle_event(self, _, connector): + # pylint: disable=no-self-use + for message in self._config_messages: + connector.send_message(message) + connector.send_message({'key': 'frontend.ready'}, scope=Scope.WEBSOCKET) + + @property + def _config_messages(self): + config = safe_load(self._config_str) + return [ + self._create_config_message(k, v) + for k, v in config.items() + ] + + @property + def _config_str(self): + with open(self._config_path, 'r') as ifile: + return ifile.read() + + @staticmethod + def _create_config_message(key, value): + return { + 'key': f'frontend.{key}', + **value + } diff --git a/tfw/components/frontend/frontend_config_handler/test_frontend_config_handler.py b/tfw/components/frontend/frontend_config_handler/test_frontend_config_handler.py new file mode 100644 index 0000000..007e5f3 --- /dev/null +++ b/tfw/components/frontend/frontend_config_handler/test_frontend_config_handler.py @@ -0,0 +1,40 @@ +from textwrap import dedent + +from .frontend_config_handler import FrontendConfigHandler + + +class MockFrontendConfigHandler(FrontendConfigHandler): + @property + def _config_str(self): + return dedent(''' + cat: + someConfigKey: lel + otherStuff: 42 + foo: + hey: yaaay + ''') + + +class MockConnector: + def __init__(self): + self.messages = [] + + def send_message(self, message, **_): + self.messages.append(message) + + +def test_frontend_config_handler(): + connector = MockConnector() + handler = MockFrontendConfigHandler('') + + handler.handle_event({'key': 'frontend.ready'}, connector) + assert connector.messages[0] == { + 'key': 'frontend.cat', + 'someConfigKey': 'lel', + 'otherStuff': 42 + } + assert connector.messages[1] == { + 'key': 
'frontend.foo', + 'hey': 'yaaay' + } + assert connector.messages[-1] == {'key': 'frontend.ready'} diff --git a/tfw/components/frontend/frontend_proxy_handler.py b/tfw/components/frontend/frontend_proxy_handler.py new file mode 100644 index 0000000..d48d8c9 --- /dev/null +++ b/tfw/components/frontend/frontend_proxy_handler.py @@ -0,0 +1,34 @@ +from tfw.internals.networking import Scope + +from .message_storage import FrontendMessageStorage + + +class FrontendProxyHandler: + keys = ['console', 'dashboard', 'frontend', 'message', 'ide.read', 'deploy.finish'] + type_id = 'ControlEventHandler' + + def __init__(self): + self.connector = None + self._frontend_message_storage = FrontendMessageStorage() + + def send_message(self, message): + self.connector.send_message(message, scope=Scope.WEBSOCKET) + + def handle_event(self, message, _): + self._frontend_message_storage.save_message(message) + if message['key'] == 'frontend.ready': + self.recover_frontend() + return + if self._filter_message(message): + self.send_message(message) + + @staticmethod + def _filter_message(message): + return not message['key'].startswith(( + 'ide', + 'message.queue' + )) + + def recover_frontend(self): + for message in self._frontend_message_storage.messages: + self.send_message(message) diff --git a/tfw/components/frontend/frontend_ready_handler.py b/tfw/components/frontend/frontend_ready_handler.py new file mode 100644 index 0000000..3b19708 --- /dev/null +++ b/tfw/components/frontend/frontend_ready_handler.py @@ -0,0 +1,39 @@ +import logging + +from tfw.internals.crypto import KeyManager, sign_message + +LOG = logging.getLogger(__name__) + + +class FrontendReadyHandler: + keys = ['frontend.ready', 'fsm.update'] + + def __init__(self, initial_trigger): + self.connector = None + self._auth_key = KeyManager().auth_key + self.initial_trigger = initial_trigger + + self.commands = { + 'frontend.ready': self.handle_ready, + 'fsm.update': self.handle_update + } + + def handle_event(self, message, 
_): + try: + self.commands[message['key']]() + except KeyError: + LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) + + def handle_ready(self): + trigger = { + 'key': 'fsm.trigger', + 'transition': self.initial_trigger + } + sign_message(self._auth_key, trigger) + self.connector.send_message(trigger) + + def handle_update(self): + self.stop() + + def stop(self): + pass diff --git a/tfw/components/frontend/message_queue_handler/__init__.py b/tfw/components/frontend/message_queue_handler/__init__.py new file mode 100644 index 0000000..0eb2a68 --- /dev/null +++ b/tfw/components/frontend/message_queue_handler/__init__.py @@ -0,0 +1 @@ +from .message_queue_handler import MessageQueueHandler diff --git a/tfw/components/frontend/message_queue_handler/message_queue_handler.py b/tfw/components/frontend/message_queue_handler/message_queue_handler.py new file mode 100644 index 0000000..97f344b --- /dev/null +++ b/tfw/components/frontend/message_queue_handler/message_queue_handler.py @@ -0,0 +1,43 @@ +from time import sleep +from queue import Queue +from threading import Thread + + +class MessageQueueHandler: + keys = ['message.queue'] + type_id = 'ControlEventHandler' + + def __init__(self, wpm): + self.connector = None + self.wpm = wpm + self._queue = Queue() + self._thread = Thread(target=self._dispatch_messages) + + def _dispatch_messages(self): + for message in iter(self._queue.get, None): + wpm = message['wpm'] if 'wpm' in message else self.wpm + cps = 5 * wpm / 60 + sleep(len(message['message']) / cps) + self.connector.send_message(message) + + def handle_event(self, message, _): + for unpacked in self._generate_messages_from_queue(message): + self._queue.put(unpacked) + + @staticmethod + def _generate_messages_from_queue(queue_message): + last = queue_message['messages'][-1] + for message in queue_message['messages']: + yield { + 'key': 'message.send', + 'typing': message is not last, + **message + } + + def start(self): + self._thread.start() + + 
def cleanup(self): + self._queue.queue.clear() + self._queue.put(None) + self._thread.join() diff --git a/tfw/components/frontend/message_queue_handler/test_message_queue.py b/tfw/components/frontend/message_queue_handler/test_message_queue.py new file mode 100644 index 0000000..198c729 --- /dev/null +++ b/tfw/components/frontend/message_queue_handler/test_message_queue.py @@ -0,0 +1,67 @@ +# pylint: disable=redefined-outer-name +from math import inf +from time import sleep +from os import urandom +from random import randint + +import pytest + +from .message_queue_handler import MessageQueueHandler + + +class MockConnector: + def __init__(self): + self.callback = None + self.messages = [] + + def raise_event(self, message): + self.callback(message, self) + sleep(0.01) + + def send_message(self, message): + self.messages.append(message) + + +@pytest.fixture +def handler(): + connector = MockConnector() + handler = MessageQueueHandler(inf) + handler.connector = connector + connector.callback = handler.handle_event + handler.start() + yield handler + handler.cleanup() + + +@pytest.fixture +def queue(): + yield { + 'key': 'message.queue', + 'messages': [ + {'originator': urandom(4).hex(), 'message': urandom(16).hex()} + for _ in range(randint(5, 10)) + ] + } + + +def test_message_order(handler, queue): + handler.connector.raise_event(queue) + old_list = queue['messages'] + new_list = handler.connector.messages + length = len(old_list) + assert len(new_list) == length + for i in range(length): + unpacked = new_list[i] + assert unpacked['key'] == 'message.send' + assert unpacked['originator'] == old_list[i]['originator'] + assert unpacked['typing'] == (i < length-1) + + +def test_wpm(handler, queue): + handler.wpm = 10000 + handler.connector.raise_event(queue) + assert not handler.connector.messages + handler.wpm = 100000000 + handler.connector.raise_event(queue) + sleep(0.25) + assert len(handler.connector.messages) == 2*len(queue['messages']) diff --git 
a/tfw/components/frontend/message_sender.py b/tfw/components/frontend/message_sender.py new file mode 100644 index 0000000..2987f1e --- /dev/null +++ b/tfw/components/frontend/message_sender.py @@ -0,0 +1,30 @@ +class MessageSender: + def __init__(self, uplink): + self.uplink = uplink + + def send(self, message, originator=None): + message = { + 'key': 'message.send', + 'message': message + } + if originator: + message['originator'] = originator + self.uplink.send_message(message) + + def queue_messages(self, messages, originator=None): + message_queue = { + 'key': 'message.queue', + 'messages': [] + } + for message in messages: + next_message = {'message': message} + if originator: + next_message['originator'] = originator + message_queue['messages'].append(next_message) + self.uplink.send_message(message_queue) + + def set_originator(self, originator): + self.uplink.send_message({ + 'key': 'message.config', + 'originator': originator + }) diff --git a/tfw/components/frontend/message_storage.py b/tfw/components/frontend/message_storage.py new file mode 100644 index 0000000..1ad85c5 --- /dev/null +++ b/tfw/components/frontend/message_storage.py @@ -0,0 +1,49 @@ +from abc import ABC, abstractmethod +from contextlib import suppress + + +class MessageStorage(ABC): + def __init__(self): + self._messages = [] + + def save_message(self, message): + with suppress(KeyError, AttributeError): + if self._filter_message(message): + self._messages.extend(self._transform_message(message)) + + @abstractmethod + def _filter_message(self, message): + raise NotImplementedError + + def _transform_message(self, message): # pylint: disable=no-self-use + yield message + + def clear(self): + self._messages.clear() + + @property + def messages(self): + yield from self._messages + + +class FrontendMessageStorage(MessageStorage): + def _filter_message(self, message): + return message['key'].startswith(( + 'console.write', + 'message.send', + 'ide.read' + )) + + def _transform_message(self, 
message): + transformations = { + 'ide.read': self._delete_ide_content + } + if message['key'] in transformations: + yield from transformations[message['key']](message) + else: + yield message + + @staticmethod + def _delete_ide_content(message): + del message['content'] + yield message diff --git a/tfw/components/fsm/__init__.py b/tfw/components/fsm/__init__.py new file mode 100644 index 0000000..5e8f625 --- /dev/null +++ b/tfw/components/fsm/__init__.py @@ -0,0 +1 @@ +from .fsm_handler import FSMHandler diff --git a/tfw/components/fsm/fsm_handler.py b/tfw/components/fsm/fsm_handler.py new file mode 100644 index 0000000..2d31594 --- /dev/null +++ b/tfw/components/fsm/fsm_handler.py @@ -0,0 +1,42 @@ +import logging + +from tfw.internals.crypto import KeyManager, sign_message +from tfw.internals.networking import Scope, Intent + +from .fsm_updater import FSMUpdater + +LOG = logging.getLogger(__name__) + + +class FSMHandler: + keys = ['fsm'] + type_id = 'ControlEventHandler' + + def __init__(self, *, fsm_type): + self.connector = None + self.fsm = fsm_type() + self._fsm_updater = FSMUpdater(self.fsm) + self.auth_key = KeyManager().auth_key + + self.command_handlers = { + 'fsm.trigger': self.handle_trigger, + 'fsm.update' : self.handle_update + } + + def handle_event(self, message, _): + try: + message = self.command_handlers[message['key']](message) + if message: + fsm_update_message = self._fsm_updater.fsm_update + sign_message(self.auth_key, fsm_update_message) + self.connector.send_message(fsm_update_message, Scope.BROADCAST, Intent.EVENT) + except KeyError: + LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) + + def handle_trigger(self, message): # pylint: disable=inconsistent-return-statements + if self.fsm.step(message['transition']): + return message + + def handle_update(self, message): + # pylint: disable=no-self-use + return message diff --git a/tfw/components/fsm/fsm_updater.py b/tfw/components/fsm/fsm_updater.py new file mode 100644 
index 0000000..8ab488f --- /dev/null +++ b/tfw/components/fsm/fsm_updater.py @@ -0,0 +1,26 @@ +class FSMUpdater: + def __init__(self, fsm): + self.fsm = fsm + + @property + def fsm_update(self): + return { + 'key': 'fsm.update', + **self.fsm_update_data + } + + @property + def fsm_update_data(self): + valid_transitions = [ + {'trigger': trigger} + for trigger in self.fsm.get_triggers(self.fsm.state) + ] + if not self.fsm.event_log: + return None + last_fsm_event = self.fsm.event_log[-1] + return { + 'current_state': self.fsm.state, + 'valid_transitions': valid_transitions, + 'in_accepted_state': self.fsm.in_accepted_state, + 'last_event': last_fsm_event + } diff --git a/tfw/components/ide/__init__.py b/tfw/components/ide/__init__.py new file mode 100644 index 0000000..81b35cf --- /dev/null +++ b/tfw/components/ide/__init__.py @@ -0,0 +1,2 @@ +from .ide_handler import IdeHandler +from .deploy_handler import DeployHandler diff --git a/tfw/components/ide/deploy_handler.py b/tfw/components/ide/deploy_handler.py new file mode 100644 index 0000000..2efd7f6 --- /dev/null +++ b/tfw/components/ide/deploy_handler.py @@ -0,0 +1,34 @@ +import logging + +LOG = logging.getLogger(__name__) + + +class DeployHandler: + keys = ['deploy.start', 'process.restart'] + + def __init__(self, process_name='webservice'): + self.connector = None + self.process_name = process_name + + self.commands = { + 'deploy.start': self.handle_deploy, + 'process.restart': self.handle_process + } + + def handle_event(self, message, _): + try: + self.commands[message['key']](message) + except KeyError: + LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) + + def handle_deploy(self, _): + self.connector.send_message({ + 'key': 'process.restart', + 'name': self.process_name + }) + + def handle_process(self, message): + response = {'key': 'deploy.finish'} + if 'error' in message: + response['error'] = message['error'] + self.connector.send_message(response) diff --git 
a/tfw/components/ide/file_manager/__init__.py b/tfw/components/ide/file_manager/__init__.py new file mode 100644 index 0000000..1a24998 --- /dev/null +++ b/tfw/components/ide/file_manager/__init__.py @@ -0,0 +1 @@ +from .file_manager import FileManager diff --git a/tfw/components/ide/file_manager/file_manager.py b/tfw/components/ide/file_manager/file_manager.py new file mode 100644 index 0000000..1ebc5e0 --- /dev/null +++ b/tfw/components/ide/file_manager/file_manager.py @@ -0,0 +1,66 @@ +import logging +from functools import wraps +from glob import glob +from fnmatch import fnmatchcase +from os.path import dirname, isdir, isfile, realpath, isabs + +LOG = logging.getLogger(__name__) + + +def _with_is_allowed(func): + @wraps(func) + def wrapper(self, *args, **kwargs): + if self.is_allowed(args[0]): + return func(self, *args, **kwargs) + raise ValueError('Forbidden path.') + return wrapper + + +class FileManager: # pylint: disable=too-many-instance-attributes + def __init__(self, patterns): + self.patterns = patterns + + @property + def files(self): + return list(set( + path + for pattern in self.patterns + for path in glob(pattern, recursive=True) + if isfile(path) and self.is_allowed(path) + )) + + @property + def parents(self): + return list(set( + self._find_directory(pattern) + for pattern in self.patterns + )) + + @staticmethod + def _find_directory(pattern): + while pattern and not isdir(pattern): + pattern = dirname(pattern) + return pattern + + def is_allowed(self, filepath): + return any( + fnmatchcase(realpath(filepath), pattern) + for pattern in self.patterns + ) + + def find_file(self, filename): + if not isabs(filename): + for filepath in self.files: + if filepath.endswith(filename): + return filepath + return filename + + @_with_is_allowed + def read_file(self, filepath): # pylint: disable=no-self-use + with open(filepath, 'rb', buffering=0) as ifile: + return ifile.read().decode(errors='surrogateescape') + + @_with_is_allowed + def write_file(self, 
filepath, contents): # pylint: disable=no-self-use + with open(filepath, 'wb', buffering=0) as ofile: + ofile.write(contents.encode()) diff --git a/tfw/components/ide/file_manager/test_file_manager.py b/tfw/components/ide/file_manager/test_file_manager.py new file mode 100644 index 0000000..b634b0d --- /dev/null +++ b/tfw/components/ide/file_manager/test_file_manager.py @@ -0,0 +1,94 @@ +# pylint: disable=redefined-outer-name +from dataclasses import dataclass +from secrets import token_urlsafe +from os import mkdir, symlink +from os.path import join, realpath, basename +from pathlib import Path +from tempfile import TemporaryDirectory + +import pytest + +from .file_manager import FileManager + + +@dataclass +class ManagerContext: + workdir: str + subdir: str + subfile: str + manager: FileManager + + def create_random_file(self, dirname, extension): + filename = self.join(f'{dirname}/{generate_name()}{extension}') + Path(filename).touch() + return filename + + def create_random_folder(self, basepath): + dirname = self.join(f'{basepath}/{generate_name()}') + mkdir(dirname) + return dirname + + def create_random_link(self, source, dirname, extension): + linkname = self.join(f'{dirname}/{generate_name()}{extension}') + symlink(source, linkname) + return linkname + + def join(self, path): + return join(self.workdir, path) + +def generate_name(): + return token_urlsafe(16) + +@pytest.fixture() +def context(): + with TemporaryDirectory() as workdir: + workdir = realpath(workdir) # macOS uses a symlinked TMPDIR + subdir = join(workdir, generate_name()) + subfile = join(subdir, generate_name() + '.txt') + mkdir(subdir) + Path(subfile).touch() + manager = FileManager([join(workdir, '**/*.txt')]) + yield ManagerContext(workdir, subdir, subfile, manager) + +def test_matching_files(context): + newdir = context.create_random_folder(context.subdir) + newfile = context.create_random_file(newdir, '.txt') + newlink = context.create_random_link(newfile, newdir, '.txt') + assert 
set(context.manager.files) == {context.subfile, newfile, newlink} + +def test_unmatching_files(context): + newtxt = context.create_random_file(context.workdir, '.txt') + newbin = context.create_random_file(context.subdir, '.bin') + context.create_random_link(newtxt, context.subdir, '.txt') + context.create_random_link(newbin, context.subdir, '.txt') + assert context.manager.files == [context.subfile] + +def test_parents(context): + newdir = context.create_random_folder(context.workdir) + context.manager.patterns += [f'{newdir}/[!/@]*/**/?.c'] + assert set(context.manager.parents) == {context.workdir, newdir} + +def test_read_write_file(context): + for _ in range(128): + content = token_urlsafe(32) + context.manager.write_file(context.subfile, content) + assert context.manager.read_file(context.subfile) == content + with open(context.subfile, 'r') as ifile: + assert ifile.read() == content + +def test_regular_ide_actions(context): + newfile1 = context.create_random_file(context.subdir, '.txt') + newfile2 = context.create_random_file(context.subdir, '.txt') + for _ in range(4): + context.manager.filename = newfile1 + content1, content2 = token_urlsafe(32), token_urlsafe(32) + context.manager.write_file(newfile1, content1) + context.manager.write_file(newfile2, content2) + assert context.manager.read_file(newfile1) == content1 + assert context.manager.read_file(newfile2) == content2 + +def test_find_file(context): + for _ in range(5): + file_abs = context.create_random_file(context.subdir, '.txt') + file_base = basename(file_abs) + assert context.manager.find_file(file_base) == file_abs diff --git a/tfw/components/ide/ide_handler.py b/tfw/components/ide/ide_handler.py new file mode 100644 index 0000000..3a86da4 --- /dev/null +++ b/tfw/components/ide/ide_handler.py @@ -0,0 +1,101 @@ +import logging + +from tfw.internals.networking import Scope +from tfw.internals.inotify import InotifyObserver + +from .file_manager import FileManager + +LOG = 
logging.getLogger(__name__) + +BUILD_ARTIFACTS = ( + "*.a", + "*.class", + "*.dll", + "*.dylib", + "*.elf", + "*.exe", + "*.jar", + "*.ko", + "*.la", + "*.lib", + "*.lo", + "*.o", + "*.obj", + "*.out", + "*.py[cod]", + "*.so", + "*.so.*", + "*.tar.gz", + "*.zip", + "*__pycache__*" +) + + +class IdeHandler: + keys = ['ide'] + type_id = 'ControlEventHandler' + + def __init__(self, *, patterns, initial_file=None): + self.connector = None + self.filemanager = FileManager(patterns) + self._initial_file = initial_file or '' + + self.monitor = InotifyObserver( + path=self.filemanager.parents, + exclude=BUILD_ARTIFACTS + ) + self.monitor.on_modified = self._reload_frontend + self.monitor.start() + + self.commands = { + 'ide.read' : self.read, + 'ide.write' : self.write + } + + def _reload_frontend(self, event): # pylint: disable=unused-argument + self.send_message({'key': 'ide.reload'}) + + @property + def initial_file(self): + if not self.filemanager.is_allowed(self._initial_file): + self._initial_file = self.filemanager.files[0] + return self._initial_file + + def send_message(self, message): + self.connector.send_message(message, scope=Scope.WEBSOCKET) + + def handle_event(self, message, _): + try: + self.commands[message['key']](message) + message['files'] = self.filemanager.files + self.send_message(message) + except KeyError: + LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) + + def read(self, message): + if 'patterns' in message: + self.filemanager.patterns = message['patterns'] + try: + message['filename'] = self.filemanager.find_file( + message.get('filename') or self.initial_file + ) + message['content'] = self.filemanager.read_file(message['filename']) + except (PermissionError, ValueError): + message['content'] = 'You have no permission to open that file :(' + except FileNotFoundError: + message['content'] = 'This file does not exist :(' + except Exception: # pylint: disable=broad-except + message['content'] = 'Failed to read file :(' + 
LOG.exception('Error reading file!') + + def write(self, message): + try: + self.filemanager.write_file(message['filename'], message['content']) + except KeyError: + LOG.error('You must provide a filename to write!') + except Exception: # pylint: disable=broad-except + LOG.exception('Error writing file!') + del message['content'] + + def cleanup(self): + self.monitor.stop() diff --git a/tfw/components/pipe_io/__init__.py b/tfw/components/pipe_io/__init__.py new file mode 100644 index 0000000..6fc4fc0 --- /dev/null +++ b/tfw/components/pipe_io/__init__.py @@ -0,0 +1,3 @@ +from .pipe_io_handler import PipeIOHandler, PipeIOHandlerBase +from .command_handler import CommandHandler +from .pipe_connector import ProxyPipeConnectorHandler diff --git a/tfw/components/pipe_io/command_handler.py b/tfw/components/pipe_io/command_handler.py new file mode 100644 index 0000000..e8d1593 --- /dev/null +++ b/tfw/components/pipe_io/command_handler.py @@ -0,0 +1,58 @@ +from subprocess import PIPE, Popen +from functools import partial +from os import getpgid, killpg +from os.path import join +from signal import SIGTERM +from secrets import token_urlsafe +from threading import Thread +from contextlib import suppress + +from pipe_io_server import terminate_process_on_failure + +from .pipe_io_handler import PipeIOHandler, DEFAULT_PERMISSIONS + + +class CommandHandler(PipeIOHandler): + def __init__(self, command, permissions=DEFAULT_PERMISSIONS): + super().__init__( + self._generate_tempfilename(), + self._generate_tempfilename(), + permissions + ) + + self._proc_stdin = open(self.pipe_io.out_pipe, 'rb') + self._proc_stdout = open(self.pipe_io.in_pipe, 'wb') + self._proc = Popen( + command, shell=True, executable='/bin/bash', + stdin=self._proc_stdin, stdout=self._proc_stdout, stderr=PIPE, + start_new_session=True + ) + self._monitor_proc_thread = self._start_monitor_proc() + + def _generate_tempfilename(self): + # pylint: disable=no-self-use + random_filename = partial(token_urlsafe, 10) + 
return join('/tmp', f'{type(self).__name__}.{random_filename()}') + + def _start_monitor_proc(self): + thread = Thread(target=self._monitor_proc, daemon=True) + thread.start() + return thread + + @terminate_process_on_failure + def _monitor_proc(self): + return_code = self._proc.wait() + if return_code == -int(SIGTERM): + # supervisord asked the program to terminate, this is fine + return + if return_code != 0: + _, stderr = self._proc.communicate() + raise RuntimeError(f'Subprocess failed ({return_code})! Stderr:\n{stderr.decode()}') + + def cleanup(self): + with suppress(ProcessLookupError): + process_group_id = getpgid(self._proc.pid) + killpg(process_group_id, SIGTERM) + self._proc_stdin.close() + self._proc_stdout.close() + super().cleanup() diff --git a/tfw/components/pipe_io/pipe_connector/__init__.py b/tfw/components/pipe_io/pipe_connector/__init__.py new file mode 100644 index 0000000..07f335a --- /dev/null +++ b/tfw/components/pipe_io/pipe_connector/__init__.py @@ -0,0 +1 @@ +from .proxy_pipe_connector_handler import ProxyPipeConnectorHandler diff --git a/tfw/components/pipe_io/pipe_connector/pipe_connector.py b/tfw/components/pipe_io/pipe_connector/pipe_connector.py new file mode 100644 index 0000000..6f76e0e --- /dev/null +++ b/tfw/components/pipe_io/pipe_connector/pipe_connector.py @@ -0,0 +1,90 @@ +import logging +from stat import S_ISFIFO +from os import access, listdir, mkdir, stat, R_OK +from os.path import exists, join + +from pipe_io_server import PipeWriterServer + +from tfw.internals.inotify import InotifyObserver, InotifyFileCreatedEvent, InotifyFileDeletedEvent + +LOG = logging.getLogger(__name__) + + +class PipeConnector: + reader_pattern, writer_pattern = 'send', 'recv' + + def __init__(self, path): + self.recv_pipes, self.send_pipes = {}, {} + self.observer = self._build_observer(path) + self.observer.on_any_event = self._on_any_event + self.observer.start() + self._connect_existing_pipes(path) + + def _build_observer(self, path): # 
pylint: disable=no-self-use + if not exists(path): + mkdir(path) + if not access(path, R_OK): + raise ValueError('Path does not exist or is not accessible.') + return InotifyObserver(path, patterns=[f'*{self.reader_pattern}*', f'*{self.writer_pattern}*']) + + def _connect_existing_pipes(self, path): + for node in listdir(path): + node_path = join(path, node) + if self._is_pipe(node_path): + self._create_pipe(node_path) + LOG.debug('Connected to existing pipe "%s"', node_path) + + def _on_any_event(self, event): + path = event.src_path + if self._is_pipe(path) and isinstance(event, InotifyFileCreatedEvent): + self._create_pipe(path) + LOG.debug('Connected to new pipe "%s"', path) + elif isinstance(event, InotifyFileDeletedEvent): + self._delete_pipe(path) + LOG.debug('Disconnected from deleted pipe "%s"', path) + + @staticmethod + def _is_pipe(path): + return exists(path) and S_ISFIFO(stat(path).st_mode) + + def _create_pipe(self, path): + if self._find_pipe(path): + return + server = None + if self.writer_pattern in path: + pipes, server = self.recv_pipes, self.build_writer(path) + elif self.reader_pattern in path: + pipes, server = self.send_pipes, self.build_reader(path) + if server: + server.start() + pipes[path] = server + + def _find_pipe(self, path): + pipes = None + if path in self.recv_pipes.keys(): + pipes = self.recv_pipes + if path in self.send_pipes.keys(): + pipes = self.send_pipes + return pipes + + def build_reader(self, path): # pylint: disable=no-self-use + raise NotImplementedError() + + def build_writer(self, path): # pylint: disable=no-self-use + return PipeWriterServer(path, manage_pipes=False) + + def _delete_pipe(self, path): + pipes = self._find_pipe(path) + if pipes: + pipes[path].stop() + del pipes[path] + + def broadcast(self, message): + for server in self.recv_pipes.values(): + server.send_message(message) + + def stop(self): + for pipe in self.recv_pipes.values(): + pipe.stop() + for pipe in self.send_pipes.values(): + pipe.stop() diff 
--git a/tfw/components/pipe_io/pipe_connector/proxy_pipe_connector_handler.py b/tfw/components/pipe_io/pipe_connector/proxy_pipe_connector_handler.py new file mode 100644 index 0000000..c82ebca --- /dev/null +++ b/tfw/components/pipe_io/pipe_connector/proxy_pipe_connector_handler.py @@ -0,0 +1,46 @@ +import logging +from threading import Lock +from json import dumps, loads, JSONDecodeError + +from pipe_io_server import PipeReaderServer + +from .pipe_connector import PipeConnector + +LOG = logging.getLogger(__name__) + + +class ProxyPipeConnectorHandler: + keys = [''] + + def __init__(self, path): + self.connector, self.pipes = None, None + self.path = path + + def start(self): + self.pipes = ProxyPipeConnector(self.path, self.connector) + + def handle_event(self, message, _): + self.pipes.broadcast(dumps(message).encode()) + + def cleanup(self): + self.pipes.stop() + + +class ProxyPipeConnector(PipeConnector): + def __init__(self, path, connector): + self.connector = connector + self.mutex = Lock() + super().__init__(path) + + def build_reader(self, path): + reader = PipeReaderServer(path, manage_pipes=False) + reader.handle_message = self._handle_message + return reader + + def _handle_message(self, message): + try: + json_object = loads(message) + with self.mutex: + self.connector.send_message(json_object) + except JSONDecodeError: + LOG.error('Received invalid JSON message: %s', message) diff --git a/tfw/components/pipe_io/pipe_connector/test_proxy_pipe_connector.py b/tfw/components/pipe_io/pipe_connector/test_proxy_pipe_connector.py new file mode 100644 index 0000000..989c5bf --- /dev/null +++ b/tfw/components/pipe_io/pipe_connector/test_proxy_pipe_connector.py @@ -0,0 +1,157 @@ +# pylint: disable=redefined-outer-name +from enum import Enum +from dataclasses import dataclass +from json import dumps +from secrets import token_urlsafe +from os import urandom, mkfifo, mkdir, remove +from os.path import join +from pathlib import Path +from tempfile import 
TemporaryDirectory + +import pytest +from tfw.internals.inotify import InotifyFileCreatedEvent, InotifyFileDeletedEvent + +from .proxy_pipe_connector_handler import ProxyPipeConnector + + +class Action(Enum): + SEND = 'send' + RECV = 'recv' + + +@dataclass +class PipeContext: + workdir: str + pipes: ProxyPipeConnector + + def emit_pipe_creation_event(self, action, inode_creator): + filename = self.join(f'{self.generate_name()}_{action.value}') + inode_creator(filename) + self.pipes.observer.on_any_event(InotifyFileCreatedEvent(filename)) + return filename + + def join(self, path): + return join(self.workdir, path) + + @staticmethod + def generate_name(): + return urandom(4).hex() + + +class MockPipeConnector(ProxyPipeConnector): + def __init__(self, path, connector): + self.reader_events, self.writer_events = [], [] + super().__init__(path, connector) + + def _build_observer(self, path): + return MockObserver() + + def build_reader(self, path): + self.reader_events.append(path) + reader = MockPipeServer() + reader.handle_message = self._handle_message + return reader + + def build_writer(self, path): + self.writer_events.append(path) + return MockPipeServer() + + +class MockObserver: + def start(self): + pass + + def on_any_event(self, event): + pass + + +class MockPipeServer: + def __init__(self): + self.messages = [] + + def handle_message(self, message): + pass + + def send_message(self, message): + self.messages.append(message) + + def start(self): + pass + + def stop(self): + pass + + +class MockConnector: # pylint: disable=too-few-public-methods + def __init__(self): + self.messages = [] + + def send_message(self, message): + self.messages.append(message) + + +@pytest.fixture +def workdir(): + with TemporaryDirectory() as workdir: + yield workdir + + +@pytest.fixture +def context(workdir): + mkfifo(join(workdir, Action.SEND.value)) + mkfifo(join(workdir, Action.RECV.value)) + yield PipeContext(workdir, MockPipeConnector(workdir, MockConnector())) + + +def 
test_existing_pipe_connection(context): + assert join(context.workdir, Action.SEND.value) in context.pipes.send_pipes.keys() + assert join(context.workdir, Action.RECV.value) in context.pipes.recv_pipes.keys() + + +def test_pipe_creation_deletion(context): + cases = [ + (Action.RECV, context.pipes.recv_pipes, context.pipes.writer_events), + (Action.SEND, context.pipes.send_pipes, context.pipes.reader_events) + ] + + for action, pipes, events in cases: + path = context.emit_pipe_creation_event(action, mkfifo) + assert events[-1] == path + assert path in pipes.keys() + remove(path) + context.pipes.observer.on_any_event(InotifyFileDeletedEvent(path)) + assert path not in pipes.keys() + + +def test_handle_message(context): + path = context.emit_pipe_creation_event(Action.SEND, mkfifo) + payload = {'key': token_urlsafe(16)} + context.pipes.send_pipes[path].handle_message(dumps(payload)) + assert context.pipes.connector.messages[-1] == payload + context.pipes.send_pipes[path].handle_message(token_urlsafe(32)) + assert len(context.pipes.connector.messages) == 1 + + +def test_broadcast(context): + paths = [ + context.emit_pipe_creation_event(Action.RECV, mkfifo) + for _ in range(32) + ] + payload = {'key': token_urlsafe(16)} + + context.pipes.broadcast(payload) + for path in paths: + assert context.pipes.recv_pipes[path].messages[-1] == payload + +def test_inode_types(context): + touch = lambda path: Path(path).touch() + cases = [ + (Action.RECV, context.pipes.recv_pipes, mkdir), + (Action.SEND, context.pipes.send_pipes, mkdir), + (Action.RECV, context.pipes.recv_pipes, touch), + (Action.SEND, context.pipes.send_pipes, touch) + ] + + for action, pipes, creator in cases: + path = context.emit_pipe_creation_event(action, creator) + assert path not in pipes.keys() diff --git a/tfw/components/pipe_io/pipe_io_handler.py b/tfw/components/pipe_io/pipe_io_handler.py new file mode 100644 index 0000000..bea846a --- /dev/null +++ b/tfw/components/pipe_io/pipe_io_handler.py @@ -0,0 
+1,47 @@ +import logging +from abc import abstractmethod +from json import loads, dumps + +from pipe_io_server import PipeIOServer + +LOG = logging.getLogger(__name__) +DEFAULT_PERMISSIONS = 0o600 + + +class PipeIOHandlerBase: + keys = [''] + + def __init__(self, in_pipe_path, out_pipe_path, permissions=DEFAULT_PERMISSIONS): + self.connector = None + self.in_pipe = in_pipe_path + self.out_pipe = out_pipe_path + self.pipe_io = PipeIOServer( + in_pipe_path, + out_pipe_path, + permissions + ) + self.pipe_io.handle_message = self._server_handle_message + self.pipe_io.start() + + def _server_handle_message(self, message): + try: + self.handle_pipe_event(message) + except: # pylint: disable=bare-except + LOG.exception('Failed to handle message %s from pipe %s!', message, self.in_pipe) + + @abstractmethod + def handle_pipe_event(self, message_bytes): + raise NotImplementedError() + + def cleanup(self): + self.pipe_io.stop() + + +class PipeIOHandler(PipeIOHandlerBase): + def handle_event(self, message, _): + json_bytes = dumps(message).encode() + self.pipe_io.send_message(json_bytes) + + def handle_pipe_event(self, message_bytes): + json = loads(message_bytes) + self.connector.send_message(json) diff --git a/tfw/components/pipe_io/test_pipe_io_handler.py b/tfw/components/pipe_io/test_pipe_io_handler.py new file mode 100644 index 0000000..b4a0aaa --- /dev/null +++ b/tfw/components/pipe_io/test_pipe_io_handler.py @@ -0,0 +1,45 @@ +# pylint: disable=redefined-outer-name +from tempfile import TemporaryDirectory +from os.path import join +from queue import Queue +from json import dumps, loads + +import pytest + +from .pipe_io_handler import PipeIOHandler + + +@pytest.fixture +def pipe_io(): + with TemporaryDirectory() as tmpdir: + pipeio = PipeIOHandler( + join(tmpdir, 'in'), + join(tmpdir, 'out'), + permissions=0o600 + ) + pipeio.connector = MockConnector() + yield pipeio + pipeio.cleanup() + + +class MockConnector: + def __init__(self): + self.messages = Queue() + + def 
send_message(self, msg): + self.messages.put(msg) + + +def test_pipe_io_handler_recv(pipe_io): + test_msg = {'key': 'cica'} + with open(pipe_io.in_pipe, 'w') as ofile: + ofile.write(dumps(test_msg) + '\n') + + assert pipe_io.connector.messages.get() == test_msg + + +def test_pipe_io_handler_send(pipe_io): + test_msg = {'key': 'cica'} + pipe_io.handle_event(test_msg, None) + with open(pipe_io.out_pipe, 'r') as ifile: + assert loads(ifile.readline()) == test_msg diff --git a/tfw/components/process_management/__init__.py b/tfw/components/process_management/__init__.py new file mode 100644 index 0000000..8549b02 --- /dev/null +++ b/tfw/components/process_management/__init__.py @@ -0,0 +1,2 @@ +from .process_handler import ProcessHandler +from .process_log_handler import ProcessLogHandler diff --git a/tfw/components/process_management/log_inotify_observer.py b/tfw/components/process_management/log_inotify_observer.py new file mode 100644 index 0000000..1a5887a --- /dev/null +++ b/tfw/components/process_management/log_inotify_observer.py @@ -0,0 +1,34 @@ +from tfw.internals.networking import Scope, Intent +from tfw.internals.inotify import InotifyObserver + +from .supervisor import ProcessLogManager + + +class LogInotifyObserver(InotifyObserver, ProcessLogManager): + def __init__(self, connector, process_name, supervisor_uri, log_tail=0): + self._connector = connector + self._process_name = process_name + self.log_tail = log_tail + self._procinfo = None + ProcessLogManager.__init__(self, supervisor_uri) + InotifyObserver.__init__(self, self._get_logfiles()) + + def _get_logfiles(self): + self._procinfo = self.supervisor.getProcessInfo(self._process_name) + return self._procinfo['stdout_logfile'], self._procinfo['stderr_logfile'] + + @property + def process_name(self): + return self._process_name + + @process_name.setter + def process_name(self, process_name): + self._process_name = process_name + self.paths = self._get_logfiles() + + def on_modified(self, event): + 
self._connector.send_message({ + 'key': 'process.log.new', + 'stdout': self.read_stdout(self.process_name, tail=self.log_tail), + 'stderr': self.read_stderr(self.process_name, tail=self.log_tail) + }, Scope.BROADCAST, Intent.EVENT) diff --git a/tfw/components/process_management/process_handler.py b/tfw/components/process_management/process_handler.py new file mode 100644 index 0000000..c56a02b --- /dev/null +++ b/tfw/components/process_management/process_handler.py @@ -0,0 +1,33 @@ +import logging +from xmlrpc.client import Fault as SupervisorFault + +from tfw.internals.networking import Scope, Intent + +from .supervisor import ProcessManager + +LOG = logging.getLogger(__name__) + + +class ProcessHandler(ProcessManager): + keys = ['process'] + type_id = 'ControlEventHandler' + + def __init__(self, *, supervisor_uri): + ProcessManager.__init__(self, supervisor_uri) + + self.commands = { + 'process.start': self.start_process, + 'process.stop': self.stop_process, + 'process.restart': self.restart_process + } + + def handle_event(self, message, connector): + try: + try: + self.commands[message['key']](message['name']) + except SupervisorFault as fault: + message['error'] = fault.faultString + connector.send_message(message, scope=Scope.BROADCAST, intent=Intent.EVENT) + except KeyError: + if not message['key'].startswith('process.log'): + LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) diff --git a/tfw/components/process_management/process_log_handler.py b/tfw/components/process_management/process_log_handler.py new file mode 100644 index 0000000..a61f806 --- /dev/null +++ b/tfw/components/process_management/process_log_handler.py @@ -0,0 +1,44 @@ +import logging + +from .log_inotify_observer import LogInotifyObserver + +LOG = logging.getLogger(__name__) + + +class ProcessLogHandler: + keys = ['process.log'] + type_id = 'ControlEventHandler' + + def __init__(self, *, process_name, supervisor_uri, log_tail=0): + self.connector, self._monitor = None, 
None + self.process_name = process_name + self._supervisor_uri = supervisor_uri + self._initial_log_tail = log_tail + + self.command_handlers = { + 'process.log.set': self.handle_set + } + + def start(self): + self._monitor = LogInotifyObserver( + connector=self.connector, + process_name=self.process_name, + supervisor_uri=self._supervisor_uri, + log_tail=self._initial_log_tail + ) + self._monitor.start() + + def handle_event(self, message, _): + try: + self.command_handlers[message['key']](message) + except KeyError: + LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) + + def handle_set(self, data): + if data.get('name'): + self._monitor.process_name = data['name'] + if data.get('tail'): + self._monitor.log_tail = data['tail'] + + def cleanup(self): + self._monitor.stop() diff --git a/lib/tfw/mixins/supervisor_mixin.py b/tfw/components/process_management/supervisor.py similarity index 68% rename from lib/tfw/mixins/supervisor_mixin.py rename to tfw/components/process_management/supervisor.py index 189d7cc..1e50d05 100644 --- a/lib/tfw/mixins/supervisor_mixin.py +++ b/tfw/components/process_management/supervisor.py @@ -1,23 +1,15 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- +from os import remove +from contextlib import suppress import xmlrpc.client from xmlrpc.client import Fault as SupervisorFault -from contextlib import suppress -from os import remove - -from tfw.decorators.lazy_property import lazy_property -from tfw.config import TFWENV -class SupervisorBaseMixin: - @lazy_property - def supervisor(self): - # pylint: disable=no-self-use - return xmlrpc.client.ServerProxy(TFWENV.SUPERVISOR_HTTP_URI).supervisor +class SupervisorBase: + def __init__(self, supervisor_uri): + self.supervisor = xmlrpc.client.ServerProxy(supervisor_uri).supervisor -class SupervisorMixin(SupervisorBaseMixin): +class ProcessManager(SupervisorBase): def stop_process(self, process_name): with suppress(SupervisorFault): self.supervisor.stopProcess(process_name) @@ -30,7 +22,7 @@ class SupervisorMixin(SupervisorBaseMixin): self.start_process(process_name) -class SupervisorLogMixin(SupervisorBaseMixin): +class ProcessLogManager(SupervisorBase): def read_stdout(self, process_name, tail=0): return self.supervisor.readProcessStdoutLog(process_name, -tail, 0) diff --git a/tfw/components/snapshots/__init__.py b/tfw/components/snapshots/__init__.py new file mode 100644 index 0000000..b9b3243 --- /dev/null +++ b/tfw/components/snapshots/__init__.py @@ -0,0 +1 @@ +from .snapshot_handler import SnapshotHandler diff --git a/lib/tfw/components/directory_snapshotting_event_handler.py b/tfw/components/snapshots/snapshot_handler.py similarity index 61% rename from lib/tfw/components/directory_snapshotting_event_handler.py rename to tfw/components/snapshots/snapshot_handler.py index 8b6f384..0c8f797 100644 --- a/lib/tfw/components/directory_snapshotting_event_handler.py +++ b/tfw/components/snapshots/snapshot_handler.py @@ -1,6 +1,4 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- +import logging from os.path import join as joinpath from os.path import basename from os import makedirs @@ -8,25 +6,24 @@ from datetime import datetime from dateutil import parser as dateparser -from tfw.event_handler_base import EventHandlerBase -from tfw.components.snapshot_provider import SnapshotProvider -from tfw.config import TFWENV -from tfw.config.logs import logging +from .snapshot_provider import SnapshotProvider LOG = logging.getLogger(__name__) -class DirectorySnapshottingEventHandler(EventHandlerBase): - def __init__(self, key, directories, exclude_unix_patterns=None): - super().__init__(key) +class SnapshotHandler: + keys = ['snapshot'] + + def __init__(self, *, directories, snapshots_dir, exclude_unix_patterns=None): + self._snapshots_dir = snapshots_dir self.snapshot_providers = {} self._exclude_unix_patterns = exclude_unix_patterns self.init_snapshot_providers(directories) self.command_handlers = { - 'take_snapshot': self.handle_take_snapshot, - 'restore_snapshot': self.handle_restore_snapshot, - 'exclude': self.handle_exclude + 'snapshot.take': self.handle_take_snapshot, + 'snapshot.restore': self.handle_restore_snapshot, + 'snapshot.exclude': self.handle_exclude } def init_snapshot_providers(self, directories): @@ -38,32 +35,28 @@ class DirectorySnapshottingEventHandler(EventHandlerBase): self._exclude_unix_patterns ) - @staticmethod - def init_git_dir(index, directory): + def init_git_dir(self, index, directory): git_dir = joinpath( - TFWENV.SNAPSHOTS_DIR, + self._snapshots_dir, f'{basename(directory)}-{index}' ) makedirs(git_dir, exist_ok=True) return git_dir - def handle_event(self, message): + def handle_event(self, message, _): try: - data = message['data'] - message['data'] = self.command_handlers[data['command']](data) - return message + self.command_handlers[message['key']](message) except KeyError: LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) - def handle_take_snapshot(self, data): + def 
handle_take_snapshot(self, _): LOG.debug('Taking snapshots of directories %s', self.snapshot_providers.keys()) for provider in self.snapshot_providers.values(): provider.take_snapshot() - return data - def handle_restore_snapshot(self, data): + def handle_restore_snapshot(self, message): date = dateparser.parse( - data.get( + message.get( 'value', datetime.now().isoformat() ) @@ -75,13 +68,11 @@ class DirectorySnapshottingEventHandler(EventHandlerBase): ) for provider in self.snapshot_providers.values(): provider.restore_snapshot(date) - return data - def handle_exclude(self, data): - exclude_unix_patterns = data['value'] + def handle_exclude(self, message): + exclude_unix_patterns = message['value'] if not isinstance(exclude_unix_patterns, list): raise KeyError for provider in self.snapshot_providers.values(): provider.exclude = exclude_unix_patterns - return data diff --git a/lib/tfw/components/snapshot_provider.py b/tfw/components/snapshots/snapshot_provider.py similarity index 98% rename from lib/tfw/components/snapshot_provider.py rename to tfw/components/snapshots/snapshot_provider.py index 8bd17bf..147fc2f 100644 --- a/lib/tfw/components/snapshot_provider.py +++ b/tfw/components/snapshots/snapshot_provider.py @@ -1,6 +1,3 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- import re from subprocess import run, CalledProcessError, PIPE from getpass import getuser diff --git a/tfw/components/terminal/__init__.py b/tfw/components/terminal/__init__.py new file mode 100644 index 0000000..584df0d --- /dev/null +++ b/tfw/components/terminal/__init__.py @@ -0,0 +1,3 @@ +from .terminal_handler import TerminalHandler +from .terminal_commands_handler import TerminalCommandsHandler +from .commands_equal import CommandsEqual diff --git a/lib/tfw/components/commands_equal.py b/tfw/components/terminal/commands_equal.py similarity index 95% rename from lib/tfw/components/commands_equal.py rename to tfw/components/terminal/commands_equal.py index d2d4d15..2e00d16 100644 --- a/lib/tfw/components/commands_equal.py +++ b/tfw/components/terminal/commands_equal.py @@ -1,10 +1,7 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - from shlex import split from re import search -from tfw.decorators.lazy_property import lazy_property +from tfw.internals.lazy import lazy_property class CommandsEqual: diff --git a/lib/tfw/components/history_monitor.py b/tfw/components/terminal/history_monitor.py similarity index 58% rename from lib/tfw/components/history_monitor.py rename to tfw/components/terminal/history_monitor.py index 410558a..7fa4a99 100644 --- a/lib/tfw/components/history_monitor.py +++ b/tfw/components/terminal/history_monitor.py @@ -1,30 +1,12 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- -from os.path import dirname from re import findall from re import compile as compileregex from abc import ABC, abstractmethod -from watchdog.events import PatternMatchingEventHandler - -from tfw.mixins.callback_mixin import CallbackMixin -from tfw.mixins.observer_mixin import ObserverMixin -from tfw.decorators.rate_limiter import RateLimiter +from tfw.internals.networking import Intent +from tfw.internals.inotify import InotifyObserver -class CallbackEventHandler(PatternMatchingEventHandler, ABC): - def __init__(self, files, *callbacks): - super().__init__(files) - self.callbacks = callbacks - - @RateLimiter(rate_per_second=2) - def on_modified(self, event): - for callback in self.callbacks: - callback() - - -class HistoryMonitor(CallbackMixin, ObserverMixin, ABC): +class HistoryMonitor(ABC, InotifyObserver): """ Abstract class capable of monitoring and parsing a history file such as bash HISTFILEs. Monitoring means detecting when the file was changed and @@ -36,29 +18,30 @@ class HistoryMonitor(CallbackMixin, ObserverMixin, ABC): command pattern property and optionally the sanitize_command method. See examples below. 
""" - def __init__(self, histfile): + def __init__(self, uplink, histfile): self.histfile = histfile - self._history = [] - self._last_length = len(self._history) - self.observer.schedule( - CallbackEventHandler( - [self.histfile], - self._fetch_history, - self._invoke_callbacks - ), - dirname(self.histfile) - ) + self.history = [] + self._last_length = len(self.history) + self.uplink = uplink + super().__init__(self.histfile) @property - def history(self): - return self._history + @abstractmethod + def domain(self): + raise NotImplementedError() + + def on_modified(self, event): + self._fetch_history() + if self._last_length < len(self.history): + for command in self.history[self._last_length:]: + self.send_message(command) def _fetch_history(self): - self._last_length = len(self._history) + self._last_length = len(self.history) with open(self.histfile, 'r') as ifile: pattern = compileregex(self.command_pattern) data = ifile.read() - self._history = [ + self.history = [ self.sanitize_command(command) for command in findall(pattern, data) ] @@ -72,9 +55,11 @@ class HistoryMonitor(CallbackMixin, ObserverMixin, ABC): # pylint: disable=no-self-use return command - def _invoke_callbacks(self): - if self._last_length < len(self._history): - self._execute_callbacks(self.history) + def send_message(self, command): + self.uplink.send_message({ + 'key': f'history.{self.domain}', + 'command': command + }, intent=Intent.EVENT) class BashMonitor(HistoryMonitor): @@ -87,6 +72,10 @@ class BashMonitor(HistoryMonitor): shopt -s histappend unset HISTCONTROL """ + @property + def domain(self): + return 'bash' + @property def command_pattern(self): return r'.+' @@ -100,6 +89,10 @@ class GDBMonitor(HistoryMonitor): HistoryMonitor to monitor GDB sessions. For this to work "set trace-commands on" must be set in GDB. 
""" + @property + def domain(self): + return 'gdb' + @property def command_pattern(self): return r'(?<=\n)\+(.+)\n' diff --git a/lib/tfw/components/terminado_mini_server.py b/tfw/components/terminal/terminado_mini_server.py similarity index 68% rename from lib/tfw/components/terminado_mini_server.py rename to tfw/components/terminal/terminado_mini_server.py index 3726097..dd43ced 100644 --- a/lib/tfw/components/terminado_mini_server.py +++ b/tfw/components/terminal/terminado_mini_server.py @@ -1,13 +1,8 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. +import logging -from tornado.ioloop import IOLoop from tornado.web import Application from terminado import TermSocket, SingleTermManager -from tfw.config import TFWENV -from tfw.config.logs import logging - LOG = logging.getLogger(__name__) @@ -30,6 +25,8 @@ class TerminadoMiniServer: @property def pty(self): + if self.term_manager.terminal is None: + self.term_manager.get_terminal() return self.term_manager.terminal.ptyproc class ResetterTermSocket(TermSocket): # pylint: disable=abstract-method @@ -43,13 +40,5 @@ class TerminadoMiniServer: def listen(self): self.application.listen(self.port) - -if __name__ == '__main__': - LOG.info('Terminado Mini Server listening on %s', TFWENV.TERMINADO_PORT) - TerminadoMiniServer( - '/terminal', - TFWENV.TERMINADO_PORT, - TFWENV.TERMINADO_WD, - ['bash'] - ).listen() - IOLoop.instance().start() + def stop(self): + self.term_manager.shutdown() diff --git a/lib/tfw/components/terminal_commands.py b/tfw/components/terminal/terminal_commands.py similarity index 90% rename from lib/tfw/components/terminal_commands.py rename to tfw/components/terminal/terminal_commands.py index 14563bd..d705eb2 100644 --- a/lib/tfw/components/terminal_commands.py +++ b/tfw/components/terminal/terminal_commands.py @@ -1,12 +1,8 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. 
See LICENSE file for details. - +import logging from abc import ABC from re import match from shlex import split -from tfw.config.logs import logging - LOG = logging.getLogger(__name__) @@ -30,7 +26,7 @@ class TerminalCommands(ABC): You can also use this class to create new commands similarly. """ - def __init__(self, bashrc=None): + def __init__(self, bashrc): self._command_method_regex = r'^command_(.+)$' self.command_implemetations = self._build_command_to_implementation_dict() if bashrc is not None: @@ -62,8 +58,8 @@ class TerminalCommands(ABC): def _match_command_regex(self, string): return match(self._command_method_regex, string) - def callback(self, history): - parts = split(history[-1]) + def callback(self, command): + parts = split(command) command = parts[0] if command in self.command_implemetations.keys(): try: diff --git a/tfw/components/terminal/terminal_commands_handler.py b/tfw/components/terminal/terminal_commands_handler.py new file mode 100644 index 0000000..e47aee5 --- /dev/null +++ b/tfw/components/terminal/terminal_commands_handler.py @@ -0,0 +1,9 @@ +from .terminal_commands import TerminalCommands + + +class TerminalCommandsHandler(TerminalCommands): + keys = ['history.bash'] + + def handle_event(self, message, _): + command = message['command'] + self.callback(command) diff --git a/tfw/components/terminal/terminal_handler.py b/tfw/components/terminal/terminal_handler.py new file mode 100644 index 0000000..8044fae --- /dev/null +++ b/tfw/components/terminal/terminal_handler.py @@ -0,0 +1,49 @@ +import logging + +from .history_monitor import BashMonitor +from .terminado_mini_server import TerminadoMiniServer + +LOG = logging.getLogger(__name__) + + +class TerminalHandler: + keys = ['terminal'] + type_id = 'ControlEventHandler' + + def __init__(self, *, port, user, working_directory, histfile): + self.connector, self._historymonitor = None, None + self._histfile = histfile + bash_as_user_cmd = ['sudo', '-u', user, 'bash'] + + 
self.terminado_server = TerminadoMiniServer( + '/terminal', + port, + working_directory, + bash_as_user_cmd + ) + self.terminado_server.listen() + + self.commands = { + 'terminal.write': self.handle_write + } + + def start(self): + self._historymonitor = BashMonitor(self.connector, self._histfile) + self._historymonitor.start() + + @property + def historymonitor(self): + return self._historymonitor + + def handle_event(self, message, _): + try: + self.commands[message['key']](message) + except KeyError: + LOG.error('IGNORING MESSAGE: Invalid message received: %s', message) + + def handle_write(self, message): + self.terminado_server.pty.write(message['command']) + + def cleanup(self): + self.terminado_server.stop() + self.historymonitor.stop() diff --git a/tfw/config/__init__.py b/tfw/config/__init__.py new file mode 100644 index 0000000..f5476e9 --- /dev/null +++ b/tfw/config/__init__.py @@ -0,0 +1 @@ +from .envvars import TFWENV, TAOENV diff --git a/tfw/config/envvars.py b/tfw/config/envvars.py new file mode 100644 index 0000000..dcf767d --- /dev/null +++ b/tfw/config/envvars.py @@ -0,0 +1,4 @@ +from .lazy_environment import LazyEnvironment + +TFWENV = LazyEnvironment('TFW_', 'tfwenvtuple').environment +TAOENV = LazyEnvironment('AVATAO_', 'taoenvtuple').environment diff --git a/lib/envvars/__init__.py b/tfw/config/lazy_environment.py similarity index 78% rename from lib/envvars/__init__.py rename to tfw/config/lazy_environment.py index 46774d4..db9a3df 100644 --- a/lib/envvars/__init__.py +++ b/tfw/config/lazy_environment.py @@ -1,10 +1,7 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- from collections import namedtuple from os import environ -from tfw.decorators.lazy_property import lazy_property +from tfw.internals.lazy import lazy_property class LazyEnvironment: diff --git a/tfw/event_handlers.py b/tfw/event_handlers.py new file mode 100644 index 0000000..9beabe8 --- /dev/null +++ b/tfw/event_handlers.py @@ -0,0 +1,8 @@ +# pylint: disable=unused-import +from tfw.internals.event_handling import ( + EventHandler, + ControlEventHandler, + FSMAwareEventHandler, + SignedEventHandler, + SignedControlEventHandler +) diff --git a/tfw/fsm/__init__.py b/tfw/fsm/__init__.py new file mode 100644 index 0000000..2516525 --- /dev/null +++ b/tfw/fsm/__init__.py @@ -0,0 +1,3 @@ +from .fsm_base import FSMBase +from .linear_fsm import LinearFSM +from .yaml_fsm import YamlFSM diff --git a/lib/tfw/fsm/fsm_base.py b/tfw/fsm/fsm_base.py similarity index 91% rename from lib/tfw/fsm/fsm_base.py rename to tfw/fsm/fsm_base.py index d9fff29..c35d094 100644 --- a/lib/tfw/fsm/fsm_base.py +++ b/tfw/fsm/fsm_base.py @@ -1,13 +1,10 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - +import logging from collections import defaultdict from datetime import datetime from transitions import Machine, MachineError -from tfw.mixins.callback_mixin import CallbackMixin -from tfw.config.logs import logging +from tfw.internals.callback_mixin import CallbackMixin LOG = logging.getLogger(__name__) @@ -79,7 +76,7 @@ class FSMBase(Machine, CallbackMixin): 'from_state': from_state, 'to_state': self.state, 'trigger': trigger, - 'timestamp': datetime.utcnow() + 'timestamp': datetime.utcnow().isoformat() }) @property diff --git a/lib/tfw/fsm/linear_fsm.py b/tfw/fsm/linear_fsm.py similarity index 88% rename from lib/tfw/fsm/linear_fsm.py rename to tfw/fsm/linear_fsm.py index 5ac5eaf..354ffd2 100644 --- a/lib/tfw/fsm/linear_fsm.py +++ b/tfw/fsm/linear_fsm.py @@ -1,9 +1,6 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. 
-# All Rights Reserved. See LICENSE file for details. - from transitions import State -from tfw.fsm.fsm_base import FSMBase +from .fsm_base import FSMBase class LinearFSM(FSMBase): diff --git a/lib/tfw/fsm/yaml_fsm.py b/tfw/fsm/yaml_fsm.py similarity index 96% rename from lib/tfw/fsm/yaml_fsm.py rename to tfw/fsm/yaml_fsm.py index fbfc542..32c002b 100644 --- a/lib/tfw/fsm/yaml_fsm.py +++ b/tfw/fsm/yaml_fsm.py @@ -1,6 +1,3 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - from subprocess import Popen, run from functools import partial, singledispatch from contextlib import suppress @@ -9,7 +6,7 @@ import yaml import jinja2 from transitions import State -from tfw.fsm.fsm_base import FSMBase +from .fsm_base import FSMBase class YamlFSM(FSMBase): diff --git a/tfw/internals/__init__.py b/tfw/internals/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/tfw/mixins/callback_mixin.py b/tfw/internals/callback_mixin.py similarity index 86% rename from lib/tfw/mixins/callback_mixin.py rename to tfw/internals/callback_mixin.py index 8075f47..c54db5b 100644 --- a/lib/tfw/mixins/callback_mixin.py +++ b/tfw/internals/callback_mixin.py @@ -1,9 +1,6 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
- from functools import partial -from tfw.decorators.lazy_property import lazy_property +from .lazy import lazy_property class CallbackMixin: diff --git a/tfw/internals/crypto/__init__.py b/tfw/internals/crypto/__init__.py new file mode 100644 index 0000000..0ff122a --- /dev/null +++ b/tfw/internals/crypto/__init__.py @@ -0,0 +1,2 @@ +from .authentication import sign_message, verify_message +from .key_manager import KeyManager diff --git a/lib/tfw/crypto.py b/tfw/internals/crypto/authentication.py similarity index 64% rename from lib/tfw/crypto.py rename to tfw/internals/crypto/authentication.py index 0b37893..0ca19a3 100644 --- a/lib/tfw/crypto.py +++ b/tfw/internals/crypto/authentication.py @@ -1,29 +1,19 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. - from functools import wraps from base64 import b64encode, b64decode from copy import deepcopy -from hashlib import md5 -from os import urandom, chmod -from os.path import exists -from stat import S_IRUSR, S_IWUSR, S_IXUSR from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.hashes import SHA256 from cryptography.hazmat.primitives.hmac import HMAC as _HMAC from cryptography.exceptions import InvalidSignature -from tfw.networking.serialization import message_bytes -from tfw.decorators.lazy_property import lazy_property -from tfw.config import TFWENV - - -def message_checksum(message): - return md5(message_bytes(message)).hexdigest() +from tfw.internals.networking import message_bytes def sign_message(key, message): + message.pop('scope', None) + message.pop('intent', None) + message.pop('signature', None) signature = message_signature(key, message) message['signature'] = b64encode(signature).decode() @@ -33,6 +23,8 @@ def message_signature(key, message): def verify_message(key, message): + message.pop('scope', None) + message.pop('intent', None) message = deepcopy(message) try: signature_b64 = 
message.pop('signature') @@ -43,32 +35,6 @@ def verify_message(key, message): return False -class KeyManager: - def __init__(self): - self.keyfile = TFWENV.AUTH_KEY - if not exists(self.keyfile): - self._init_auth_key() - - @lazy_property - def auth_key(self): - with open(self.keyfile, 'rb') as ifile: - return ifile.read() - - def _init_auth_key(self): - key = self.generate_key() - with open(self.keyfile, 'wb') as ofile: - ofile.write(key) - self._chmod_700_keyfile() - return key - - @staticmethod - def generate_key(): - return urandom(32) - - def _chmod_700_keyfile(self): - chmod(self.keyfile, S_IRUSR | S_IWUSR | S_IXUSR) - - class HMAC: def __init__(self, key, message): self.key = key diff --git a/tfw/internals/crypto/key_manager.py b/tfw/internals/crypto/key_manager.py new file mode 100644 index 0000000..4e56cad --- /dev/null +++ b/tfw/internals/crypto/key_manager.py @@ -0,0 +1,48 @@ +from atexit import register +from tempfile import gettempdir +from os import urandom, chmod, remove +from os.path import exists, join +from stat import S_IRUSR, S_IWUSR, S_IXUSR + +from tfw.internals.lazy import lazy_property +from tfw.internals.ref_counter import RefCounter + +KEYFILE = join(gettempdir(), 'tfw-auth.key') +LOCKFILE = join(gettempdir(), 'tfw-auth.lock') + + +class KeyManagerRefCounter(RefCounter): + def deallocate(self): + if exists(KEYFILE): + remove(KEYFILE) + + +class KeyManager: + keyfile = KEYFILE + refcounter = KeyManagerRefCounter(LOCKFILE) + + def __init__(self): + if not exists(self.keyfile): + self._init_auth_key() + + @lazy_property + def auth_key(self): + with open(self.keyfile, 'rb') as ifile: + return ifile.read() + + def _init_auth_key(self): + key = self.generate_key() + with open(self.keyfile, 'wb') as ofile: + ofile.write(key) + self._chmod_700_keyfile() + return key + + @staticmethod + def generate_key(): + return urandom(32) + + def _chmod_700_keyfile(self): + chmod(self.keyfile, S_IRUSR | S_IWUSR | S_IXUSR) + + 
+register(KeyManager.refcounter.teardown_instance) diff --git a/tfw/internals/event_handling/__init__.py b/tfw/internals/event_handling/__init__.py new file mode 100644 index 0000000..bd5e14f --- /dev/null +++ b/tfw/internals/event_handling/__init__.py @@ -0,0 +1,6 @@ +from .event_handler import EventHandler +from .event_handler_factory_base import EventHandlerFactoryBase +from .control_event_handler import ControlEventHandler +from .fsm_aware_event_handler import FSMAwareEventHandler +from .signed_event_handler import SignedEventHandler +from .signed_control_event_handler import SignedControlEventHandler diff --git a/tfw/internals/event_handling/control_event_handler.py b/tfw/internals/event_handling/control_event_handler.py new file mode 100644 index 0000000..381963e --- /dev/null +++ b/tfw/internals/event_handling/control_event_handler.py @@ -0,0 +1,8 @@ +from tfw.internals.networking import Intent + +from .event_handler import EventHandler + + +class ControlEventHandler(EventHandler): # pylint: disable=abstract-method + def _validate_message(self, message): + return message.get('intent') != Intent.EVENT.value diff --git a/tfw/internals/event_handling/event_handler.py b/tfw/internals/event_handling/event_handler.py new file mode 100644 index 0000000..a503649 --- /dev/null +++ b/tfw/internals/event_handling/event_handler.py @@ -0,0 +1,36 @@ +from .type_id_registry import TypeIdRegistryMixin + + +class EventHandler(TypeIdRegistryMixin): + _instances = set() + _type_id_registry = {} + + def __init__(self, connector): + type(self)._instances.add(self) + self.connector = connector + + def start(self): + self.connector.register_callback(self._event_callback) + + def _event_callback(self, message): + if self._validate_message(message): + self.handle_event(message, self.connector) + + def _validate_message(self, message): + # pylint: disable=unused-argument,no-self-use + return True + + def handle_event(self, message, connector): + raise NotImplementedError() + + 
@classmethod + def stop_all_instances(cls): + for instance in cls._instances: + instance.stop() + + def stop(self): + self.connector.close() + self.cleanup() + + def cleanup(self): + pass diff --git a/tfw/internals/event_handling/event_handler_factory_base.py b/tfw/internals/event_handling/event_handler_factory_base.py new file mode 100644 index 0000000..8c5df21 --- /dev/null +++ b/tfw/internals/event_handling/event_handler_factory_base.py @@ -0,0 +1,79 @@ +from contextlib import suppress + +from .event_handler import EventHandler + + +class EventHandlerFactoryBase: + def build(self, handler_stub, *, keys=None, event_handler_type=None): + builder = EventHandlerBuilder(handler_stub, keys, event_handler_type) + connector = self._build_connector() + event_handler = builder.build(connector) + handler_stub.connector = connector + with suppress(AttributeError): + handler_stub.start() + event_handler.start() + return event_handler + + def _build_connector(self): + raise NotImplementedError() + + +class EventHandlerBuilder: + def __init__(self, event_handler, supplied_keys, event_handler_type): + self._analyzer = HandlerStubAnalyzer(event_handler, supplied_keys) + self._event_handler_type = self._determine_type(event_handler_type) + + def _determine_type(self, provided_type): + type_ = provided_type or self._analyzer.event_handler_type or EventHandler + if not issubclass(type_, EventHandler): + raise ValueError("Invalid type supplied!") + return type_ + + def build(self, connector): + event_handler = self._event_handler_type(connector) + connector.subscribe(*self._try_get_keys(event_handler)) + event_handler.handle_event = self._analyzer.handle_event + with suppress(AttributeError): + event_handler.cleanup = self._analyzer.cleanup + return event_handler + + def _try_get_keys(self, event_handler): + try: + return self._analyzer.keys + except ValueError: + with suppress(AttributeError): + return event_handler.keys + raise + + +class HandlerStubAnalyzer: + def __init__(self, 
event_handler, supplied_keys): + self._event_handler = event_handler + self._supplied_keys = supplied_keys + + @property + def keys(self): + if self._supplied_keys is None: + try: + return self._event_handler.keys + except AttributeError: + raise ValueError('No keys supplied!') + return self._supplied_keys + + @property + def handle_event(self): + try: + return self._event_handler.handle_event + except AttributeError: + if callable(self._event_handler): + return self._event_handler + raise ValueError('Object must implement handle_event or be a callable!') + + @property + def cleanup(self): + return self._event_handler.cleanup + + @property + def event_handler_type(self): + with suppress(AttributeError): + return EventHandler.get_type(self._event_handler.type_id) diff --git a/tfw/internals/event_handling/fsm_aware.py b/tfw/internals/event_handling/fsm_aware.py new file mode 100644 index 0000000..e5de2dc --- /dev/null +++ b/tfw/internals/event_handling/fsm_aware.py @@ -0,0 +1,34 @@ +import logging + +from tfw.internals.crypto import KeyManager, verify_message + +LOG = logging.getLogger(__name__) + + +class FSMAware: + keys = ['fsm.update'] + + def __init__(self): + self.fsm_state = None + self.fsm_in_accepted_state = False + self.fsm_event_log = [] + self._auth_key = KeyManager().auth_key + + def process_message(self, message): + if message['key'] == 'fsm.update': + if verify_message(self._auth_key, message): + self._handle_fsm_update(message) + + def _handle_fsm_update(self, message): + try: + new_state = message['current_state'] + if self.fsm_state != new_state: + self.handle_fsm_step(message) + self.fsm_state = new_state + self.fsm_in_accepted_state = message['in_accepted_state'] + self.fsm_event_log.append(message) + except KeyError: + LOG.error('Invalid fsm_update message received!') + + def handle_fsm_step(self, message): + pass diff --git a/tfw/internals/event_handling/fsm_aware_event_handler.py b/tfw/internals/event_handling/fsm_aware_event_handler.py new 
file mode 100644 index 0000000..a128de8 --- /dev/null +++ b/tfw/internals/event_handling/fsm_aware_event_handler.py @@ -0,0 +1,19 @@ +from .event_handler import EventHandler +from .fsm_aware import FSMAware + + +class FSMAwareEventHandler(EventHandler, FSMAware): + # pylint: disable=abstract-method + """ + Abstract base class for EventHandlers which automatically + keep track of the state of the TFW FSM. + """ + def __init__(self, connector): + EventHandler.__init__(self, connector) + FSMAware.__init__(self) + + def _event_callback(self, message): + self.process_message(message) + + def handle_fsm_step(self, message): + self.handle_event(message, self.connector) diff --git a/tfw/internals/event_handling/signed_control_event_handler.py b/tfw/internals/event_handling/signed_control_event_handler.py new file mode 100644 index 0000000..545a5fc --- /dev/null +++ b/tfw/internals/event_handling/signed_control_event_handler.py @@ -0,0 +1,10 @@ +from .control_event_handler import ControlEventHandler +from .signed_event_handler import SignedEventHandler + +class SignedControlEventHandler(ControlEventHandler, SignedEventHandler): + # pylint: disable=abstract-method + def _validate_message(self, message): + return ( + ControlEventHandler._validate_message(self, message) and + SignedEventHandler._validate_message(self, message) + ) diff --git a/tfw/internals/event_handling/signed_event_handler.py b/tfw/internals/event_handling/signed_event_handler.py new file mode 100644 index 0000000..15f4439 --- /dev/null +++ b/tfw/internals/event_handling/signed_event_handler.py @@ -0,0 +1,20 @@ +import logging + +from tfw.internals.crypto import KeyManager, verify_message + +from .event_handler import EventHandler + +LOG = logging.getLogger(__name__) + + +# pylint: disable=abstract-method +class SignedEventHandler(EventHandler): + def __init__(self, connector): + self._auth_key = KeyManager().auth_key + super().__init__(connector) + + def _validate_message(self, message): + is_valid = 
verify_message(self._auth_key, message) + if not is_valid: + LOG.error('Message does not have valid signature: %s', message) + return is_valid diff --git a/tfw/internals/event_handling/test_event_handling/__init__.py b/tfw/internals/event_handling/test_event_handling/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tfw/internals/event_handling/test_event_handling/test_control_event_handler.py b/tfw/internals/event_handling/test_event_handling/test_control_event_handler.py new file mode 100644 index 0000000..057a9c5 --- /dev/null +++ b/tfw/internals/event_handling/test_event_handling/test_control_event_handler.py @@ -0,0 +1,33 @@ +from tfw.internals.networking import Intent + +from .util import DummyConnector, simulate_event +from ..control_event_handler import ControlEventHandler + + +def test_receives_control_intent(): + eh = ControlEventHandler(DummyConnector()) + messages = [] + eh.handle_event = lambda msg, _: messages.append(msg) + + test_msg = {"key": "cica", "intent": Intent.CONTROL.value} + simulate_event(eh, test_msg) + assert messages[0] == test_msg + + +def test_no_intent_defaults_to_control(): + eh = ControlEventHandler(DummyConnector()) + messages = [] + eh.handle_event = lambda msg, _: messages.append(msg) + + test_msg = {"key": "cica"} + simulate_event(eh, test_msg) + assert messages[0] == test_msg + + +def test_ignores_event_intent(): + eh = ControlEventHandler(DummyConnector()) + messages = [] + eh.handle_event = lambda msg, _: messages.append(msg) + + simulate_event(eh, {"key": "cica", "intent": Intent.EVENT.value}) + assert not messages diff --git a/tfw/internals/event_handling/test_event_handling/test_event_handler.py b/tfw/internals/event_handling/test_event_handling/test_event_handler.py new file mode 100644 index 0000000..e206b46 --- /dev/null +++ b/tfw/internals/event_handling/test_event_handling/test_event_handler.py @@ -0,0 +1,213 @@ +# pylint: disable=redefined-outer-name,attribute-defined-outside-init +from secrets 
import token_urlsafe +from random import randint + +import pytest + +from ..event_handler_factory_base import EventHandlerFactoryBase +from ..event_handler import EventHandler +from ..control_event_handler import ControlEventHandler +from ..fsm_aware_event_handler import FSMAwareEventHandler + + +class MockEventHandlerFactory(EventHandlerFactoryBase): + def _build_connector(self): + return MockConnector() + + +class MockConnector: + def __init__(self): + self.keys = [] + self._on_message = None + + def simulate_message(self, message): + self._on_message(message) + + def register_callback(self, callback): + self._on_message = callback + + def subscribe(self, *keys): + self.keys.extend(keys) + + def unsubscribe(self, *keys): + for key in keys: + self.keys.remove(key) + + def send_message(self, message, scope=None): + pass + + def close(self): + pass + + +class MockEventHandlerStub: + def __init__(self): + self.connector = None + self.last_message = None + self.cleaned_up = False + self.started = False + + def start(self): + self.started = True + + def cleanup(self): + self.cleaned_up = True + + +class MockEventHandler(MockEventHandlerStub): + # pylint: disable=unused-argument + def handle_event(self, message, connector): + self.last_message = message + + +class MockCallable(MockEventHandlerStub): + def __call__(self, message, connector): + self.last_message = message + + +@pytest.fixture +def test_msg(): + yield token_urlsafe(randint(16, 64)) + + +@pytest.fixture +def test_keys(): + yield [ + token_urlsafe(randint(2, 8)) + for _ in range(randint(16, 32)) + ] + + +def test_build_from_object(test_keys, test_msg): + mock_eh = MockEventHandlerStub() + def handle_event(message, connector): + raise RuntimeError(message, connector.keys) + mock_eh.handle_event = handle_event + + assert not mock_eh.started + eh = MockEventHandlerFactory().build(mock_eh, keys=test_keys) + + assert mock_eh.started + assert mock_eh.connector is eh.connector + with pytest.raises(RuntimeError) as 
err: + eh.connector.simulate_message(test_msg) + msg, keys = err.value.args + assert msg == test_msg + assert keys == test_keys + assert not mock_eh.cleaned_up + eh.stop() + assert mock_eh.cleaned_up + + +def test_build_from_object_with_keys(test_keys, test_msg): + mock_eh = MockEventHandler() + mock_eh.keys = test_keys + + assert not mock_eh.started + eh = MockEventHandlerFactory().build(mock_eh) + + assert mock_eh.connector.keys == test_keys + assert eh.connector is mock_eh.connector + assert mock_eh.started + assert not mock_eh.last_message + eh.connector.simulate_message(test_msg) + assert mock_eh.last_message == test_msg + assert not mock_eh.cleaned_up + EventHandler.stop_all_instances() + assert mock_eh.cleaned_up + + +def test_build_from_simple_object(test_keys, test_msg): + class SimpleMockEventHandler: + # pylint: disable=no-self-use + def handle_event(self, message, connector): + raise RuntimeError(message) + + mock_eh = SimpleMockEventHandler() + eh = MockEventHandlerFactory().build(mock_eh, keys=test_keys) + + with pytest.raises(RuntimeError) as err: + eh.connector.simulate_message(test_msg) + msg = err.value.args[0] + assert msg == test_msg + + +def test_build_type(): + class SomeControlHandler: + keys = ['cica'] + type_id = 'ControlEventHandler' + # pylint: disable=no-self-use + def handle_event(self, *_): + pass + + eh = MockEventHandlerFactory().build(SomeControlHandler()) + assert isinstance(eh, ControlEventHandler) + + eh = MockEventHandlerFactory().build(SomeControlHandler(), event_handler_type=EventHandler) + assert type(eh) == EventHandler # pylint: disable=unidiomatic-typecheck + + SomeControlHandler.type_id = 'FSMAwareEventHandler' + eh = MockEventHandlerFactory().build(SomeControlHandler()) + assert isinstance(eh, FSMAwareEventHandler) + + eh = MockEventHandlerFactory().build(lambda *_: None, keys=['cica']) + assert isinstance(eh, EventHandler) + + +def test_build_from_callable(test_keys, test_msg): + mock_eh = MockCallable() + + assert not 
mock_eh.started + eh = MockEventHandlerFactory().build(mock_eh, keys=test_keys) + + assert mock_eh.started + assert mock_eh.connector is eh.connector + assert eh.connector.keys == test_keys + assert not mock_eh.last_message + eh.connector.simulate_message(test_msg) + assert mock_eh.last_message == test_msg + assert not mock_eh.cleaned_up + eh.stop() + assert mock_eh.cleaned_up + + +def test_build_from_function(test_keys, test_msg): + def some_function(message, connector): + raise RuntimeError(message, connector.keys) + eh = MockEventHandlerFactory().build(some_function, keys=test_keys) + + assert eh.connector.keys == test_keys + with pytest.raises(RuntimeError) as err: + eh.connector.simulate_message(test_msg) + msg, keys = err.value.args + assert msg == test_msg + assert keys == test_keys + + +def test_build_from_lambda(test_keys, test_msg): + def assert_messages_equal(msg): + assert msg == test_msg + fun = lambda msg, sc: assert_messages_equal(msg) + eh = MockEventHandlerFactory().build(fun, keys=test_keys) + eh.connector.simulate_message(test_msg) + + +def test_build_raises_if_no_key(test_keys): + eh = MockEventHandler() + with pytest.raises(ValueError): + MockEventHandlerFactory().build(eh) + + def handle_event(*_): + pass + with pytest.raises(ValueError): + MockEventHandlerFactory().build(handle_event) + + with pytest.raises(ValueError): + MockEventHandlerFactory().build(lambda msg, sc: None) + + WithKeysEventHandler = EventHandler + WithKeysEventHandler.keys = test_keys + MockEventHandlerFactory().build(eh, event_handler_type=WithKeysEventHandler) + + eh.keys = test_keys + MockEventHandlerFactory().build(eh) diff --git a/tfw/internals/event_handling/test_event_handling/test_fsm_aware_event_handler.py b/tfw/internals/event_handling/test_event_handling/test_fsm_aware_event_handler.py new file mode 100644 index 0000000..c08b8d0 --- /dev/null +++ b/tfw/internals/event_handling/test_event_handling/test_fsm_aware_event_handler.py @@ -0,0 +1,71 @@ +# pylint: 
disable=redefined-outer-name +import pytest + +from tfw.internals.networking import Intent +from tfw.internals.crypto import KeyManager, sign_message + +from .util import DummyConnector, simulate_event +from ..fsm_aware_event_handler import FSMAwareEventHandler + + +@pytest.fixture +def key(): + yield KeyManager().auth_key + + +@pytest.fixture +def fsm_update_msg(): + yield { + 'key': 'fsm.update', + 'intent': Intent.EVENT.value, + 'scope': 'broadcast', + 'current_state': '1', + 'in_accepted_state': False, + 'last_event': { + 'from_state': '0', + 'timestamp': '2019-09-04T16:51:15.587555', + 'to_state': '1', 'trigger': + 'step_1' + }, + 'valid_transitions': [{'trigger': 'step_2'}] + } + + +def test_ignores_unauthenticated(fsm_update_msg): + messages = [] + eh = FSMAwareEventHandler(DummyConnector()) + eh.handle_event = lambda msg, _: messages.append(msg) + + simulate_event(eh, fsm_update_msg) + assert not messages + + +def test_ignores_other_keys(key): + messages = [] + eh = FSMAwareEventHandler(DummyConnector()) + eh.handle_event = lambda msg, _: messages.append(msg) + + test_msg = {"key": "not.fsm.update"} + sign_message(key, test_msg) + simulate_event(eh, test_msg) + assert not messages + + +def test_accepts_authenticated(key, fsm_update_msg): + messages = [] + eh = FSMAwareEventHandler(DummyConnector()) + eh.handle_event = lambda msg, _: messages.append(msg) + + sign_message(key, fsm_update_msg) + simulate_event(eh, fsm_update_msg) + assert messages[0] == fsm_update_msg + assert eh.fsm_state == '1' + assert not eh.fsm_in_accepted_state + assert len(eh.fsm_event_log) == 1 + + fsm_update_msg['in_accepted_state'] = True + fsm_update_msg['current_state'] = '2' + sign_message(key, fsm_update_msg) + simulate_event(eh, fsm_update_msg) + assert eh.fsm_state == '2' + assert eh.fsm_in_accepted_state diff --git a/tfw/internals/event_handling/test_event_handling/test_signed_event_handler.py b/tfw/internals/event_handling/test_event_handling/test_signed_event_handler.py new 
file mode 100644 index 0000000..fbf7e43 --- /dev/null +++ b/tfw/internals/event_handling/test_event_handling/test_signed_event_handler.py @@ -0,0 +1,32 @@ +# pylint: disable=redefined-outer-name +import pytest + +from tfw.internals.crypto import sign_message, KeyManager + +from .util import DummyConnector, simulate_event +from ..signed_event_handler import SignedEventHandler + + +@pytest.fixture +def key(): + yield KeyManager().auth_key + + +def test_ignores_unauthenticated(): + eh = SignedEventHandler(DummyConnector()) + messages = [] + eh.handle_event = lambda msg, _: messages.append(msg) + + simulate_event(eh, {"key": "cica"}) + assert not messages + + +def test_accepts_authenticated(key): + eh = SignedEventHandler(DummyConnector()) + messages = [] + eh.handle_event = lambda msg, _: messages.append(msg) + + test_msg = {"key": "cica"} + sign_message(key, test_msg) + simulate_event(eh, test_msg) + assert messages[0] == test_msg diff --git a/tfw/internals/event_handling/test_event_handling/util.py b/tfw/internals/event_handling/test_event_handling/util.py new file mode 100644 index 0000000..1dbd171 --- /dev/null +++ b/tfw/internals/event_handling/test_event_handling/util.py @@ -0,0 +1,11 @@ +class DummyConnector(): + def send_message(self, message, scope=None, intent=None): + pass + + def close(self): + pass + + +def simulate_event(event_handler, message): + # pylint: disable=protected-access + event_handler._event_callback(message) diff --git a/tfw/internals/event_handling/type_id_registry/__init__.py b/tfw/internals/event_handling/type_id_registry/__init__.py new file mode 100644 index 0000000..45ed6e6 --- /dev/null +++ b/tfw/internals/event_handling/type_id_registry/__init__.py @@ -0,0 +1 @@ +from .type_id_registry import TypeIdRegistryMixin diff --git a/tfw/internals/event_handling/type_id_registry/test_type_id_registry.py b/tfw/internals/event_handling/type_id_registry/test_type_id_registry.py new file mode 100644 index 0000000..df6a994 --- /dev/null +++ 
b/tfw/internals/event_handling/type_id_registry/test_type_id_registry.py @@ -0,0 +1,46 @@ +import pytest + +from .type_id_registry import TypeIdRegistryMixin + + +def test_registry(): + class A(TypeIdRegistryMixin): + _type_id_registry = {} + class B(A): + pass + class C(A): + pass + class D(B): + pass + + # pylint: disable=protected-access + assert A._type_id_registry['B'] == A.get_type('B') == B + assert A._type_id_registry['C'] == A.get_type('C') == C + assert A._type_id_registry['D'] == A.get_type('D') == D + + +def test_build_by_type_id(): + class D(TypeIdRegistryMixin): + _type_id_registry = {} + built = [] + class E(D): + def __init__(self): + built.append(self) + class F(D): + def __init__(self): + built.append(self) + raise RuntimeError('cica') + class H(E): + pass + + D.build_type('E') + assert isinstance(built[0], E) + + with pytest.raises(RuntimeError) as err: + D.build_type('F') + assert err.value.args[0] == 'cica' + assert isinstance(built[1], F) + + D.build_type('H') + assert isinstance(built[2], H) + assert isinstance(built[2], E) diff --git a/tfw/internals/event_handling/type_id_registry/type_id_registry.py b/tfw/internals/event_handling/type_id_registry/type_id_registry.py new file mode 100644 index 0000000..7970392 --- /dev/null +++ b/tfw/internals/event_handling/type_id_registry/type_id_registry.py @@ -0,0 +1,24 @@ +from abc import ABCMeta +from contextlib import suppress + + +class TypeIdRegistryMeta(ABCMeta): + def __init__(cls, name, bases, attrs): + with suppress(AttributeError): + if cls.__name__ in cls._type_id_registry: + raise RuntimeError('Type id must be unique!') + cls._type_id_registry[cls.__name__] = cls + super().__init__(name, bases, attrs) + + +class TypeIdRegistryMixin(metaclass=TypeIdRegistryMeta): + @classmethod + def get_type(cls, type_id): + try: + return cls._type_id_registry[type_id] + except KeyError: + raise RuntimeError(f'No type with id {type_id}!') + + @classmethod + def build_type(cls, type_id, *args, **kwargs): + 
return cls.get_type(type_id)(*args, **kwargs) diff --git a/tfw/internals/inotify/__init__.py b/tfw/internals/inotify/__init__.py new file mode 100644 index 0000000..1b97c47 --- /dev/null +++ b/tfw/internals/inotify/__init__.py @@ -0,0 +1,6 @@ +from .inotify import InotifyObserver +from .inotify import ( + InotifyFileCreatedEvent, InotifyFileModifiedEvent, InotifyFileMovedEvent, + InotifyFileDeletedEvent, InotifyDirCreatedEvent, InotifyDirModifiedEvent, + InotifyDirMovedEvent, InotifyDirDeletedEvent +) diff --git a/tfw/internals/inotify/inotify.py b/tfw/internals/inotify/inotify.py new file mode 100644 index 0000000..867a704 --- /dev/null +++ b/tfw/internals/inotify/inotify.py @@ -0,0 +1,192 @@ +# pylint: disable=too-few-public-methods +import logging +from typing import Iterable +from time import time +from os.path import abspath, dirname, isdir + +from watchdog.observers import Observer +from watchdog.events import FileSystemMovedEvent, PatternMatchingEventHandler +from watchdog.events import ( + FileCreatedEvent, FileModifiedEvent, FileMovedEvent, FileDeletedEvent, + DirCreatedEvent, DirModifiedEvent, DirMovedEvent, DirDeletedEvent +) + +# This is done to prevent inotify event logs triggering themselves (infinite log recursion) +logging.getLogger('watchdog.observers.inotify_buffer').propagate = False + + +class InotifyEvent: + def __init__(self, src_path): + self.date = time() + self.src_path = src_path + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return f'{self.__class__.__name__}({self.src_path})' + + +class InotifyMovedEvent(InotifyEvent): + def __init__(self, src_path, dest_path): + self.dest_path = dest_path + super().__init__(src_path) + + def __repr__(self): + return f'{self.__class__.__name__}({self.src_path}, {self.dest_path})' + + +class InotifyFileCreatedEvent(InotifyEvent): + pass + + +class InotifyFileModifiedEvent(InotifyEvent): + pass + + +class InotifyFileMovedEvent(InotifyMovedEvent): + pass + + +class 
InotifyFileDeletedEvent(InotifyEvent): + pass + + +class InotifyDirCreatedEvent(InotifyEvent): + pass + + +class InotifyDirModifiedEvent(InotifyEvent): + pass + + +class InotifyDirMovedEvent(InotifyMovedEvent): + pass + + +class InotifyDirDeletedEvent(InotifyEvent): + pass + + +class InotifyObserver: + def __init__(self, path, patterns=None, exclude=None, recursive=False): + self._files = [] + self._paths = path + self._patterns = patterns or [] + self._exclude = exclude + self._recursive = recursive + self._observer = Observer() + self._reset() + + def _reset(self): + if isinstance(self._paths, str): + self._paths = [self._paths] + if isinstance(self._paths, Iterable): + self._extract_files_from_paths() + else: + raise ValueError('Expected one or more string paths.') + + patterns = self._files+self.patterns + handler = PatternMatchingEventHandler(patterns if patterns else None, self.exclude) + handler.on_any_event = self._dispatch_event + self._observer.unschedule_all() + for path in self.paths: + self._observer.schedule(handler, path, self._recursive) + + def _extract_files_from_paths(self): + files, paths = [], [] + for path in self._paths: + path = abspath(path) + if isdir(path): + paths.append(path) + else: + paths.append(dirname(path)) + files.append(path) + self._files, self._paths = files, paths + + @property + def paths(self): + return self._paths + + @paths.setter + def paths(self, paths): + self._paths = paths + self._reset() + + @property + def patterns(self): + return self._patterns + + @patterns.setter + def patterns(self, patterns): + self._patterns = patterns or [] + self._reset() + + @property + def exclude(self): + return self._exclude + + @exclude.setter + def exclude(self, exclude): + self._exclude = exclude + self._reset() + + def start(self): + self._observer.start() + + def stop(self): + self._observer.stop() + self._observer.join() + + def _dispatch_event(self, event): + event_to_action = { + InotifyFileCreatedEvent : self.on_created, + 
InotifyFileModifiedEvent : self.on_modified, + InotifyFileMovedEvent : self.on_moved, + InotifyFileDeletedEvent : self.on_deleted, + InotifyDirCreatedEvent : self.on_created, + InotifyDirModifiedEvent : self.on_modified, + InotifyDirMovedEvent : self.on_moved, + InotifyDirDeletedEvent : self.on_deleted + } + + event = self._transform_event(event) + self.on_any_event(event) + event_to_action[type(event)](event) + + @staticmethod + def _transform_event(event): + watchdog_to_inotify = { + FileCreatedEvent : InotifyFileCreatedEvent, + FileModifiedEvent : InotifyFileModifiedEvent, + FileMovedEvent : InotifyFileMovedEvent, + FileDeletedEvent : InotifyFileDeletedEvent, + DirCreatedEvent : InotifyDirCreatedEvent, + DirModifiedEvent : InotifyDirModifiedEvent, + DirMovedEvent : InotifyDirMovedEvent, + DirDeletedEvent : InotifyDirDeletedEvent + } + + try: + cls = watchdog_to_inotify[type(event)] + except KeyError: + raise NameError('Watchdog API returned an unknown event.') + + if isinstance(event, FileSystemMovedEvent): + return cls(event.src_path, event.dest_path) + return cls(event.src_path) + + def on_any_event(self, event): + pass + + def on_created(self, event): + pass + + def on_modified(self, event): + pass + + def on_moved(self, event): + pass + + def on_deleted(self, event): + pass diff --git a/tfw/internals/inotify/test_inotify.py b/tfw/internals/inotify/test_inotify.py new file mode 100644 index 0000000..3fabf6d --- /dev/null +++ b/tfw/internals/inotify/test_inotify.py @@ -0,0 +1,185 @@ +# pylint: disable=redefined-outer-name + +from queue import Empty, Queue +from secrets import token_urlsafe +from pathlib import Path +from shutil import rmtree +from os.path import join +from os import mkdir, remove, rename +from tempfile import TemporaryDirectory +from contextlib import suppress + +import watchdog +import pytest + +from .inotify import InotifyObserver +from .inotify import ( + InotifyFileCreatedEvent, InotifyFileModifiedEvent, InotifyFileMovedEvent, + 
InotifyFileDeletedEvent, InotifyDirCreatedEvent, InotifyDirModifiedEvent, + InotifyDirMovedEvent, InotifyDirDeletedEvent +) + +with suppress(AttributeError): + watchdog.observers.inotify_buffer.InotifyBuffer.delay = 0 + + +class InotifyContext: + def __init__(self, workdir, subdir, subfile, observer): + self.missing_events = 0 + self.workdir = workdir + self.subdir = subdir + self.subfile = subfile + self.observer = observer + + self.event_to_queue = { + InotifyFileCreatedEvent : self.observer.create_queue, + InotifyFileModifiedEvent : self.observer.modify_queue, + InotifyFileMovedEvent : self.observer.move_queue, + InotifyFileDeletedEvent : self.observer.delete_queue, + InotifyDirCreatedEvent : self.observer.create_queue, + InotifyDirModifiedEvent : self.observer.modify_queue, + InotifyDirMovedEvent : self.observer.move_queue, + InotifyDirDeletedEvent : self.observer.delete_queue + } + + def create_random_file(self, dirname, extension): + filename = self.join(f'{dirname}/{generate_name()}{extension}') + Path(filename).touch() + return filename + + def create_random_folder(self, basepath): + dirname = self.join(f'{basepath}/{generate_name()}') + mkdir(dirname) + return dirname + + def join(self, path): + return join(self.workdir, path) + + def check_event(self, event_type, paths): + if isinstance(paths, str): + paths = [paths] + self.missing_events += len(paths) + for _ in range(len(paths)): + event = self.event_to_queue[event_type].get(timeout=0.1) + assert isinstance(event, event_type) + assert event.src_path in paths + + def check_empty(self, event_type): + with pytest.raises(Empty): + self.event_to_queue[event_type].get(timeout=0.1) + + def check_any(self): + attrs = self.observer.__dict__.values() + total = sum([q.qsize() for q in attrs if isinstance(q, Queue)]) + return total + self.missing_events == len(self.observer.any_list) + + +class InotifyTestObserver(InotifyObserver): + def __init__(self, paths, patterns=None, exclude=None, recursive=False): + 
self.any_list = [] + self.create_queue, self.modify_queue, self.move_queue, self.delete_queue = [Queue() for _ in range(4)] + super().__init__(paths, patterns, exclude, recursive) + + def on_any_event(self, event): + self.any_list.append(event) + + def on_created(self, event): + self.create_queue.put(event) + + def on_modified(self, event): + self.modify_queue.put(event) + + def on_moved(self, event): + self.move_queue.put(event) + + def on_deleted(self, event): + self.delete_queue.put(event) + +def generate_name(): + return token_urlsafe(16) + +@pytest.fixture() +def context(): + with TemporaryDirectory() as workdir: + subdir = join(workdir, generate_name()) + subfile = join(subdir, generate_name() + '.txt') + mkdir(subdir) + Path(subfile).touch() + monitor = InotifyTestObserver(workdir, recursive=True) + monitor.start() + yield InotifyContext(workdir, subdir, subfile, monitor) + +def test_create(context): + newfile = context.create_random_file(context.workdir, '.txt') + context.check_event(InotifyFileCreatedEvent, newfile) + newdir = context.create_random_folder(context.workdir) + context.check_event(InotifyDirCreatedEvent, newdir) + assert context.check_any() + +def test_modify(context): + with open(context.subfile, 'wb', buffering=0) as ofile: + ofile.write(b'text') + context.check_event(InotifyFileModifiedEvent, context.subfile) + while True: + try: + context.observer.modify_queue.get(timeout=0.1) + context.missing_events += 1 + except Empty: + break + rename(context.subfile, context.subfile + '_new') + context.check_event(InotifyDirModifiedEvent, context.subdir) + assert context.check_any() + +def test_file_move(context): + rename(context.subfile, context.subfile + '_new') + context.check_event(InotifyFileMovedEvent, context.subfile) + assert context.check_any() + +def test_folder_move(context): + remove(context.subfile) + rename(context.subdir, context.subdir + '_new') + context.check_event(InotifyDirMovedEvent, context.subdir) + assert context.check_any() + 
+def test_delete(context): + rmtree(context.subdir) + context.check_event(InotifyFileDeletedEvent, context.subfile) + context.check_event(InotifyDirDeletedEvent, context.subdir) + assert context.check_any() + +def test_paths(context): + context.observer.paths = context.subdir + newdir = context.create_random_folder(context.workdir) + newfile = context.create_random_file(context.subdir, '.txt') + context.check_event(InotifyDirModifiedEvent, context.subdir) + context.check_event(InotifyFileCreatedEvent, newfile) + context.observer.paths = [newdir, newfile] + remove(newfile) + context.check_event(InotifyFileDeletedEvent, newfile) + assert context.check_any() + context.observer.paths = context.workdir + +def test_patterns(context): + context.observer.patterns = ['*.txt'] + context.create_random_file(context.subdir, '.bin') + newfile = context.create_random_file(context.subdir, '.txt') + context.check_event(InotifyFileCreatedEvent, newfile) + context.check_empty(InotifyFileCreatedEvent) + assert context.check_any() + context.observer.patterns = None + +def test_exclude(context): + context.observer.exclude = ['*.txt'] + context.create_random_file(context.subdir, '.txt') + newfile = context.create_random_file(context.subdir, '.bin') + context.check_event(InotifyFileCreatedEvent, newfile) + context.check_empty(InotifyFileCreatedEvent) + assert context.check_any() + context.observer.exclude = None + +def test_stress(context): + newfiles = [] + for _ in range(1024): + newfiles.append(context.create_random_file(context.subdir, '.txt')) + context.check_event(InotifyFileCreatedEvent, newfiles) + assert context.check_any() diff --git a/lib/tfw/decorators/lazy_property.py b/tfw/internals/lazy.py similarity index 86% rename from lib/tfw/decorators/lazy_property.py rename to tfw/internals/lazy.py index 14ad788..0bdd35f 100644 --- a/lib/tfw/decorators/lazy_property.py +++ b/tfw/internals/lazy.py @@ -1,6 +1,3 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. 
-# All Rights Reserved. See LICENSE file for details. - from functools import update_wrapper, wraps diff --git a/tfw/internals/networking/__init__.py b/tfw/internals/networking/__init__.py new file mode 100644 index 0000000..520cbec --- /dev/null +++ b/tfw/internals/networking/__init__.py @@ -0,0 +1,5 @@ +from .serialization import serialize_tfw_msg, deserialize_tfw_msg, with_deserialize_tfw_msg, message_bytes +from .zmq_connector import ZMQConnector, ZMQDownlinkConnector, ZMQUplinkConnector +from .zmq_listener import ZMQListener +from .scope import Scope +from .intent import Intent diff --git a/tfw/internals/networking/intent.py b/tfw/internals/networking/intent.py new file mode 100644 index 0000000..a13c7bb --- /dev/null +++ b/tfw/internals/networking/intent.py @@ -0,0 +1,6 @@ +from enum import Enum + + +class Intent(Enum): + CONTROL = 'control' + EVENT = 'event' diff --git a/tfw/internals/networking/scope.py b/tfw/internals/networking/scope.py new file mode 100644 index 0000000..639461b --- /dev/null +++ b/tfw/internals/networking/scope.py @@ -0,0 +1,7 @@ +from enum import Enum + + +class Scope(Enum): + ZMQ = 'zmq' + WEBSOCKET = 'websocket' + BROADCAST = 'broadcast' diff --git a/lib/tfw/networking/serialization.py b/tfw/internals/networking/serialization.py similarity index 95% rename from lib/tfw/networking/serialization.py rename to tfw/internals/networking/serialization.py index c54609b..e329a6c 100644 --- a/lib/tfw/networking/serialization.py +++ b/tfw/internals/networking/serialization.py @@ -1,5 +1,3 @@ -# Copyright (C) 2018 Avatao.com Innovative Learning Kft. -# All Rights Reserved. See LICENSE file for details. 
""" TFW JSON message format diff --git a/tfw/internals/networking/test_networking.py b/tfw/internals/networking/test_networking.py new file mode 100644 index 0000000..deb86c5 --- /dev/null +++ b/tfw/internals/networking/test_networking.py @@ -0,0 +1,213 @@ +# pylint: disable=redefined-outer-name +from os.path import join +from secrets import token_urlsafe +from random import randint +from tempfile import TemporaryDirectory +from contextlib import suppress + +import pytest +from tornado.ioloop import IOLoop + +from tfw.internals.networking import ZMQListener, ZMQConnector, Scope, Intent + + +@pytest.fixture +def _listener_and_connector(): + with TemporaryDirectory() as tmpdir: + down_sock = join(tmpdir, 'down') + up_sock = join(tmpdir, 'up') + server_downlink = f'ipc://{down_sock}' + server_uplink = f'ipc://{up_sock}' + + listener = ZMQListener(server_downlink, server_uplink) + connector = ZMQConnector(server_uplink, server_downlink) + yield listener, connector + listener.close() + connector.close() + + +@pytest.fixture +def zmq_listener(_listener_and_connector): + listener, _ = _listener_and_connector + yield listener + + +@pytest.fixture +def zmq_connector(_listener_and_connector): + _, connector = _listener_and_connector + yield connector + + +def run_ioloop_once(): + # hack: we have to wait for the messages to get through + # the network stack of the OS while the IOLoop is waiting + # for them via select/epoll/kqueue + IOLoop.current().call_later(0.1, IOLoop.current().stop) + IOLoop.current().start() + + +@pytest.fixture +def test_messages(): + random_str = lambda: token_urlsafe(randint(4, 8)) + yield [ + { + 'key': random_str(), + random_str(): randint(8192, 16384), + random_str(): random_str(), + random_str(): { + random_str(): random_str(), + random_str(): {random_str(): random_str()} + }, + random_str(): [random_str(), random_str()] + } + for _ in range(randint(8, 16)) + ] + + +def wait_until_subscriber_connects(listener, connector): + # Warning: you are 
better off without comprehending how this works + # Reference: ZMQ PUB-SUB slow joiner problem + + # Wait until something can go through the connection + dummy = {'key': '-'} + while True: + listener.send_message(dummy) + with suppress(IOError): + if connector.recv_message(block=False) == dummy: + break + # Throw away leftover messages from last while loop + sentinel = {'key': '_'} + listener.send_message(sentinel) + while True: + with suppress(IOError): + if connector.recv_message(block=False) == sentinel: + break + + +def test_server_downlink(zmq_listener, zmq_connector, test_messages): + messages = [] + zmq_listener.register_callback(messages.append) + + for message in test_messages: + zmq_connector.send_message(message) + + run_ioloop_once() + + assert messages == test_messages + + +def test_connector_default_scope_is_zmq(zmq_listener, zmq_connector): + messages = [] + zmq_listener.register_callback(messages.append) + + zmq_connector.send_message({'key': 'cica'}) + + run_ioloop_once() + + assert messages[0]['scope'] == Scope.ZMQ.value + + +def test_connector_preserves_scope(zmq_listener, zmq_connector): + messages = [] + zmq_listener.register_callback(messages.append) + + zmq_connector.send_message({'key': 'cica', 'scope': Scope.WEBSOCKET.value}) + + run_ioloop_once() + + assert messages[0]['scope'] == Scope.WEBSOCKET.value + + +def test_connector_scope_overrides_message_scope(zmq_listener, zmq_connector): + messages = [] + zmq_listener.register_callback(messages.append) + + zmq_connector.send_message( + {'key': 'cica', 'scope': Scope.WEBSOCKET.value}, + scope=Scope.ZMQ + ) + + run_ioloop_once() + + assert messages[0]['scope'] == Scope.ZMQ.value + + +def test_connector_adds_intent(zmq_listener, zmq_connector): + messages = [] + zmq_listener.register_callback(messages.append) + + zmq_connector.send_message( + {'key': 'cica'}, + intent=Intent.EVENT + ) + + run_ioloop_once() + + assert messages[0]['intent'] == Intent.EVENT.value + + +def 
test_connector_preserves_intent(zmq_listener, zmq_connector): + messages = [] + zmq_listener.register_callback(messages.append) + + zmq_connector.send_message({'key': 'cica', 'intent': Intent.EVENT.value}) + + run_ioloop_once() + + assert messages[0]['intent'] == Intent.EVENT.value + + +def test_server_uplink(zmq_listener, zmq_connector, test_messages): + messages = [] + zmq_connector.subscribe('') + zmq_connector.register_callback(messages.append) + + for message in test_messages: + zmq_listener.send_message(message) + + run_ioloop_once() + + assert messages == test_messages + + +def test_connector_downlink_subscribe(zmq_listener, zmq_connector): + key1_messages = [{'key': '1', 'data': i} for i in range(randint(128, 256))] + key2_messages = [{'key': '2', 'data': i} for i in range(randint(128, 256))] + all_messages = key1_messages + key2_messages + + messages = [] + zmq_connector.subscribe('1') + zmq_connector.register_callback(messages.append) + + for message in all_messages: + zmq_listener.send_message(message) + + run_ioloop_once() + + assert messages == key1_messages + assert all((msg not in messages for msg in key2_messages)) + + +def test_listener_sync_recv(zmq_listener, zmq_connector, test_messages): + for message in test_messages: + zmq_connector.send_message(message) + assert zmq_listener.recv_message() == message + + +def test_connector_sync_recv(zmq_listener, zmq_connector, test_messages): + zmq_connector.subscribe('') + wait_until_subscriber_connects(zmq_listener, zmq_connector) + for message in test_messages: + zmq_listener.send_message(message) + assert zmq_connector.recv_message() == message + + +def test_sync_recv_raises_if_callback_is_registered(zmq_listener, zmq_connector): + zmq_listener.register_callback(lambda msg: None) + zmq_connector.register_callback(lambda msg: None) + + with pytest.raises(RuntimeError): + zmq_listener.recv_message() + + with pytest.raises(RuntimeError): + zmq_connector.recv_message() diff --git 
a/tfw/internals/networking/zmq_connector.py b/tfw/internals/networking/zmq_connector.py new file mode 100644 index 0000000..cd1111e --- /dev/null +++ b/tfw/internals/networking/zmq_connector.py @@ -0,0 +1,74 @@ +import logging + +import zmq +from zmq.eventloop.zmqstream import ZMQStream + +from .scope import Scope +from .serialization import ( + serialize_tfw_msg, + deserialize_tfw_msg, + with_deserialize_tfw_msg +) + +LOG = logging.getLogger(__name__) + + +class ZMQDownlinkConnector: + def __init__(self, connect_addr): + self._zmq_sub_socket = zmq.Context.instance().socket(zmq.SUB) + self._zmq_sub_socket.setsockopt(zmq.RCVHWM, 0) + self._zmq_sub_socket.connect(connect_addr) + self._zmq_sub_stream = ZMQStream(self._zmq_sub_socket) + + def subscribe(self, *keys): + for key in keys: + self._zmq_sub_socket.setsockopt_string(zmq.SUBSCRIBE, key) + + def unsubscribe(self, *keys): + for key in keys: + self._zmq_sub_socket.setsockopt_string(zmq.UNSUBSCRIBE, key) + + def register_callback(self, callback): + callback = with_deserialize_tfw_msg(callback) if callback else None + self._zmq_sub_stream.on_recv(callback) + + def recv_message(self, *, block=True): + if self._zmq_sub_stream.receiving(): + raise RuntimeError('Synchronous recv() called while a callback is registered!') + flags = 0 if block else zmq.NOBLOCK + try: + return deserialize_tfw_msg(*self._zmq_sub_socket.recv_multipart(flags)) + except zmq.ZMQError: + raise IOError("No data available to recv!") + + def close(self): + self._zmq_sub_stream.close() + + +class ZMQUplinkConnector: + def __init__(self, connect_addr): + self._zmq_push_socket = zmq.Context.instance().socket(zmq.PUSH) + self._zmq_push_socket.setsockopt(zmq.SNDHWM, 0) + self._zmq_push_socket.connect(connect_addr) + + def send_message(self, message, scope=None, intent=None): + if 'scope' not in message: + message['scope'] = Scope.ZMQ.value + if scope is not None: + message['scope'] = scope.value + if intent is not None: + message['intent'] = 
intent.value + self._zmq_push_socket.send_multipart(serialize_tfw_msg(message)) + + def close(self): + self._zmq_push_socket.close() + + +class ZMQConnector(ZMQDownlinkConnector, ZMQUplinkConnector): + def __init__(self, downlink_connect_addr, uplink_connect_addr): + ZMQDownlinkConnector.__init__(self, downlink_connect_addr) + ZMQUplinkConnector.__init__(self, uplink_connect_addr) + + def close(self): + ZMQDownlinkConnector.close(self) + ZMQUplinkConnector.close(self) diff --git a/tfw/internals/networking/zmq_listener.py b/tfw/internals/networking/zmq_listener.py new file mode 100644 index 0000000..2cf6b3d --- /dev/null +++ b/tfw/internals/networking/zmq_listener.py @@ -0,0 +1,59 @@ +import logging + +import zmq +from zmq.eventloop.zmqstream import ZMQStream + +from .serialization import ( + serialize_tfw_msg, + deserialize_tfw_msg, + with_deserialize_tfw_msg +) + +LOG = logging.getLogger(__name__) + + +class ZMQDownlinkListener: + def __init__(self, bind_addr): + self._zmq_pull_socket = zmq.Context.instance().socket(zmq.PULL) + self._zmq_pull_socket.setsockopt(zmq.RCVHWM, 0) + self._zmq_pull_socket.bind(bind_addr) + self._zmq_pull_stream = ZMQStream(self._zmq_pull_socket) + + def register_callback(self, callback): + callback = with_deserialize_tfw_msg(callback) if callback else None + self._zmq_pull_stream.on_recv(callback) + + def recv_message(self, *, block=True): + if self._zmq_pull_stream.receiving(): + raise RuntimeError('Synchronous recv() called while a callback is registered!') + flags = 0 if block else zmq.NOBLOCK + try: + return deserialize_tfw_msg(*self._zmq_pull_socket.recv_multipart(flags)) + except zmq.ZMQError: + raise IOError("No data available to recv!") + + def close(self): + self._zmq_pull_stream.close() + + +class ZMQUplinkListener: + def __init__(self, bind_addr): + self._zmq_pub_socket = zmq.Context.instance().socket(zmq.PUB) + self._zmq_pub_socket.setsockopt(zmq.SNDHWM, 0) + self._zmq_pub_socket.bind(bind_addr) + + def send_message(self, 
message: dict): + self._zmq_pub_socket.send_multipart(serialize_tfw_msg(message)) + + def close(self): + self._zmq_pub_socket.close() + + +class ZMQListener(ZMQDownlinkListener, ZMQUplinkListener): + def __init__(self, downlink_bind_addr, uplink_bind_addr): + ZMQDownlinkListener.__init__(self, downlink_bind_addr) + ZMQUplinkListener.__init__(self, uplink_bind_addr) + + def close(self): + ZMQDownlinkListener.close(self) + ZMQUplinkListener.close(self) diff --git a/tfw/internals/ref_counter/__init__.py b/tfw/internals/ref_counter/__init__.py new file mode 100644 index 0000000..25f2d50 --- /dev/null +++ b/tfw/internals/ref_counter/__init__.py @@ -0,0 +1 @@ +from .ref_counter import RefCounter diff --git a/tfw/internals/ref_counter/ref_counter.py b/tfw/internals/ref_counter/ref_counter.py new file mode 100644 index 0000000..4ff7889 --- /dev/null +++ b/tfw/internals/ref_counter/ref_counter.py @@ -0,0 +1,45 @@ +from os import remove +from contextlib import contextmanager +from fcntl import flock, LOCK_EX, LOCK_UN + + +class RefCounter: + def __init__(self, lockpath): + self.lockpath = lockpath + self._lockfile = open(self.lockpath, 'a+') + with self.locked(): + counter = self._read_counter() + self._write_counter(counter+1) + + @contextmanager + def locked(self): + flock(self._lockfile, LOCK_EX) + yield + flock(self._lockfile, LOCK_UN) + + def _read_counter(self): + self._lockfile.seek(0) + try: + counter = int(self._lockfile.read()) + except ValueError: + counter = 0 + return counter + + def _write_counter(self, counter): + self._lockfile.seek(0) + self._lockfile.truncate() + self._lockfile.write(str(counter)) + self._lockfile.flush() + + def teardown_instance(self): + with self.locked(): + counter = self._read_counter() + if counter <= 1: + remove(self.lockpath) + self.deallocate() + else: + self._write_counter(counter-1) + self._lockfile.close() + + def deallocate(self): + pass diff --git a/tfw/internals/ref_counter/test_ref_counter.py 
b/tfw/internals/ref_counter/test_ref_counter.py new file mode 100644 index 0000000..6d65997 --- /dev/null +++ b/tfw/internals/ref_counter/test_ref_counter.py @@ -0,0 +1,72 @@ +# pylint: disable=redefined-outer-name +from dataclasses import dataclass +from textwrap import dedent +from os import chdir, mkfifo +from os.path import dirname, join, realpath +from signal import SIGINT +from subprocess import DEVNULL, Popen, PIPE +from tempfile import TemporaryDirectory + +import pytest + +from .ref_counter import RefCounter + + +@dataclass +class CounterContext: + lockpath: str + pipepath: str + + @property + def source(self): + return dedent(f'''\ + from time import sleep + from atexit import register + from ref_counter import RefCounter + + counter = RefCounter('{self.lockpath}') + register(counter.teardown_instance) + print(flush=True) + while True: + sleep(1) + ''') + + +@pytest.fixture +def context(): + with TemporaryDirectory() as workdir: + chdir(dirname(realpath(__file__))) + pipepath = join(workdir, 'test.pipe') + mkfifo(pipepath) + yield CounterContext(join(workdir, 'test.lock'), pipepath) + + +def test_increment_decrement(context): + counter, processes = 0, [] + for _ in range(5): + new_proc = Popen(['python3', '-c', context.source], stdout=PIPE, stderr=DEVNULL) + new_proc.stdout.readline() + processes.append(new_proc) + counter += 1 + for proc in processes: + with open(context.lockpath, 'r') as lock: + assert lock.read() == str(counter) + counter -= 1 + proc.send_signal(SIGINT) + proc.wait() + + +def test_deallocate(context): + state = False + def trigger(): + nonlocal state + state = True + refcounters = [] + for _ in range(32): + new_refc = RefCounter(context.lockpath) + new_refc.deallocate = trigger + refcounters.append(new_refc) + for refc in refcounters: + assert not state + refc.teardown_instance() + assert state diff --git a/tfw/internals/server/__init__.py b/tfw/internals/server/__init__.py new file mode 100644 index 0000000..73e3f40 --- /dev/null +++ 
b/tfw/internals/server/__init__.py @@ -0,0 +1 @@ +from .zmq_websocket_router import ZMQWebSocketRouter diff --git a/tfw/internals/server/tfw_router.py b/tfw/internals/server/tfw_router.py new file mode 100644 index 0000000..c2a18ab --- /dev/null +++ b/tfw/internals/server/tfw_router.py @@ -0,0 +1,22 @@ +from tfw.internals.networking import Scope + + +class TFWRouter: + def __init__(self, send_to_zmq, send_to_websockets): + self.send_to_zmq = send_to_zmq + self.send_to_websockets = send_to_websockets + + def route(self, message): + scope = Scope(message.pop('scope', 'zmq')) + + routing_table = { + Scope.ZMQ: self.send_to_zmq, + Scope.WEBSOCKET: self.send_to_websockets, + Scope.BROADCAST: self.broadcast + } + action = routing_table[scope] + action(message) + + def broadcast(self, message): + self.send_to_zmq(message) + self.send_to_websockets(message) diff --git a/tfw/internals/server/zmq_websocket_router.py b/tfw/internals/server/zmq_websocket_router.py new file mode 100644 index 0000000..7d8b3f5 --- /dev/null +++ b/tfw/internals/server/zmq_websocket_router.py @@ -0,0 +1,48 @@ +import json +import logging + +from tornado.websocket import WebSocketHandler + +from .tfw_router import TFWRouter + +LOG = logging.getLogger(__name__) + + +class ZMQWebSocketRouter(WebSocketHandler): + # pylint: disable=abstract-method,attribute-defined-outside-init + instances = set() + + def initialize(self, **kwargs): + self._listener = kwargs['listener'] + self.tfw_router = TFWRouter(self.send_to_zmq, self.send_to_websockets) + + def send_to_zmq(self, message): + self._listener.send_message(message) + + @classmethod + def send_to_websockets(cls, message): + for instance in cls.instances: + instance.write_message(message) + + def prepare(self): + type(self).instances.add(self) + + def on_close(self): + type(self).instances.remove(self) + + def open(self, *args, **kwargs): + LOG.debug('WebSocket connection initiated!') + self._listener.register_callback(self.zmq_callback) + + def 
zmq_callback(self, message): + LOG.debug('Received on ZMQ pull socket: %s', message) + self.tfw_router.route(message) + + def on_message(self, message): + message = json.loads(message) + LOG.debug('Received on WebSocket: %s', message) + self.tfw_router.route(message) + + # much secure, very cors, wow + def check_origin(self, origin): + return True diff --git a/tfw/logging.py b/tfw/logging.py new file mode 100644 index 0000000..dad7499 --- /dev/null +++ b/tfw/logging.py @@ -0,0 +1,132 @@ +# pylint: disable=bad-whitespace +from datetime import datetime +from typing import TextIO, Union +from dataclasses import dataclass +from traceback import format_exception +from logging import DEBUG, getLogger, Handler, Formatter, Filter + + +class Color: + RED = '\033[31m' + GREEN = '\033[32m' + YELLOW = '\033[33m' + BLUE = '\033[34m' + CYAN = '\033[36m' + WHITE = '\033[37m' + RESET = '\033[0m' + + +@dataclass +class Log: + stream: Union[str, TextIO] + formatter: Formatter + + +class Logger: + def __init__(self, logs, level=DEBUG): + self.root_logger = getLogger() + self.old_level = self.root_logger.level + self.new_level = level + self.handlers = [] + for log in logs: + handler = LogHandler(log.stream) + handler.setFormatter(log.formatter) + self.handlers.append(handler) + + def start(self): + self.root_logger.setLevel(self.new_level) + for handler in self.handlers: + self.root_logger.addHandler(handler) + + def stop(self): + self.root_logger.setLevel(self.old_level) + for handler in self.handlers: + handler.close() + self.root_logger.removeHandler(handler) + + +class LogHandler(Handler): + def __init__(self, stream): + if isinstance(stream, str): + self.stream = open(stream, 'a+') + self.close_stream = True + else: + self.stream = stream + self.close_stream = False + super().__init__() + + def emit(self, record): + entry = self.format(record) + self.stream.write(entry+'\n') + self.stream.flush() + + def close(self): + if self.close_stream: + self.stream.close() + +class 
LogFormatter(Formatter): + severity_to_color = { + 'CRITICAL' : Color.RED, + 'ERROR' : Color.RED, + 'WARNING' : Color.YELLOW, + 'INFO' : Color.GREEN, + 'DEBUG' : Color.BLUE, + 'NOTSET' : Color.CYAN + } + + def __init__(self, limit): + self.limit = limit + super().__init__() + + def format(self, record): + time = datetime.utcfromtimestamp(record.created).strftime('%H:%M:%S') + if record.args: + tuple_args = (record.args,) if isinstance(record.args, dict) else record.args + clean_args = tuple((self.trim(arg) for arg in tuple_args)) + message = record.msg % clean_args + else: + message = record.msg + trace = '\n'+''.join(format_exception(*record.exc_info)) if record.exc_info else '' + + return (f'[{Color.WHITE}{time}{Color.RESET}|>' + f'{self.severity_to_color[record.levelname]}{record.module}:' + f'{record.levelname.lower()}{Color.RESET}] {message}{trace}') + + def trim(self, value): + if isinstance(value, dict): + return {k: self.trim(v) for k, v in value.items()} + if isinstance(value, str): + value_str = str(value) + return value_str if len(value_str) <= self.limit else f'{value_str[:self.limit]}...' 
+ return value + + +class VerboseLogFormatter(Formatter): + def format(self, record): # pylint: disable=no-self-use + date = datetime.utcfromtimestamp(record.created).strftime('%H:%M:%S') + if record.args: + message = record.msg % record.args + else: + message = record.msg + trace = '\n'+''.join(format_exception(*record.exc_info)) if record.exc_info else '' + + return (f'[{date}|>{record.module}:{record.levelname.lower()}] ' + f'{message}{trace}') + + +class WhitelistFilter(Filter): + def __init__(self, names): + self.names = names + super().__init__() + + def filter(self, record): + return record.module in self.names + + +class BlacklistFilter(Filter): + def __init__(self, names): + self.names = names + super().__init__() + + def filter(self, record): + return record.module not in self.names diff --git a/tfw/main/__init__.py b/tfw/main/__init__.py new file mode 100644 index 0000000..9dac792 --- /dev/null +++ b/tfw/main/__init__.py @@ -0,0 +1,5 @@ +from .event_handler_factory import EventHandlerFactory +from .logging import setup_logger +from .signal_handling import setup_signal_handlers +from .tfw_connector import TFWUplinkConnector, TFWConnector +from .tfw_server import TFWServer diff --git a/tfw/main/event_handler_factory.py b/tfw/main/event_handler_factory.py new file mode 100644 index 0000000..b913898 --- /dev/null +++ b/tfw/main/event_handler_factory.py @@ -0,0 +1,8 @@ +from tfw.internals.event_handling import EventHandlerFactoryBase + +from .tfw_connector import TFWConnector + + +class EventHandlerFactory(EventHandlerFactoryBase): + def _build_connector(self): + return TFWConnector() diff --git a/tfw/main/logging.py b/tfw/main/logging.py new file mode 100644 index 0000000..6d7fb02 --- /dev/null +++ b/tfw/main/logging.py @@ -0,0 +1,12 @@ +from sys import stderr +from os.path import join + +from tfw.config import TFWENV +from tfw.logging import Log, Logger, LogFormatter, VerboseLogFormatter + + +def setup_logger(name): + Logger([ + Log(stderr, 
LogFormatter(20)), + Log(join(TFWENV.LOGS_DIR, name+'.log'), VerboseLogFormatter()) + ]).start() diff --git a/tfw/main/signal_handling.py b/tfw/main/signal_handling.py new file mode 100644 index 0000000..387c77d --- /dev/null +++ b/tfw/main/signal_handling.py @@ -0,0 +1,11 @@ +from signal import signal, SIGTERM, SIGINT + +from tfw.internals.event_handling import EventHandler + + +def setup_signal_handlers(): + def stop(*_): + EventHandler.stop_all_instances() + exit(0) + signal(SIGTERM, stop) + signal(SIGINT, stop) diff --git a/tfw/main/tfw_connector.py b/tfw/main/tfw_connector.py new file mode 100644 index 0000000..be21dab --- /dev/null +++ b/tfw/main/tfw_connector.py @@ -0,0 +1,25 @@ +from tfw.internals.networking import ZMQConnector, ZMQUplinkConnector +from tfw.config import TFWENV + + +class ConnAddrMixin: + @property + def uplink_conn_addr(self): + return f'tcp://localhost:{TFWENV.PULL_PORT}' + + @property + def downlink_conn_addr(self): + return f'tcp://localhost:{TFWENV.PUB_PORT}' + + +class TFWUplinkConnector(ZMQUplinkConnector, ConnAddrMixin): + def __init__(self): + super().__init__(self.uplink_conn_addr) + + +class TFWConnector(ZMQConnector, ConnAddrMixin): + def __init__(self): + super().__init__( + self.downlink_conn_addr, + self.uplink_conn_addr + ) diff --git a/tfw/main/tfw_server.py b/tfw/main/tfw_server.py new file mode 100644 index 0000000..8c64a8e --- /dev/null +++ b/tfw/main/tfw_server.py @@ -0,0 +1,34 @@ +import logging + +from tornado.web import Application + +from tfw.internals.networking import ZMQListener +from tfw.internals.server import ZMQWebSocketRouter +from tfw.config import TFWENV + +LOG = logging.getLogger(__name__) + + +class TFWServer: + """ + This class handles the proxying of messages between the frontend and event handlers. + It proxies messages from the "/ws" route to all event handlers subscribed to a ZMQ + SUB socket. 
+ """ + def __init__(self): + downlink_bind_addr = f'tcp://*:{TFWENV.PULL_PORT}' + uplink_bind_addr = f'tcp://*:{TFWENV.PUB_PORT}' + self._listener = ZMQListener( + downlink_bind_addr=downlink_bind_addr, + uplink_bind_addr=uplink_bind_addr + ) + LOG.debug('Pull socket bound to %s', downlink_bind_addr) + LOG.debug('Pub socket bound to %s', uplink_bind_addr) + self.application = Application([( + r'/ws', ZMQWebSocketRouter, { + 'listener': self._listener, + } + )]) + + def listen(self): + self.application.listen(TFWENV.WEB_PORT)