mirror of
https://github.com/avatao-content/baseimage-tutorial-framework
synced 2025-06-29 05:05:12 +00:00
Rework whole package structure (improved dependency handling)
This commit is contained in:
@ -1,11 +0,0 @@
|
||||
from .commands_equal import CommandsEqual
|
||||
from .file_manager import FileManager
|
||||
from .fsm_updater import FSMUpdater
|
||||
from .history_monitor import BashMonitor, GDBMonitor
|
||||
from .log_inotify_observer import LogInotifyObserver
|
||||
from .message_sender import MessageSender
|
||||
from .message_storage import FrontendMessageStorage
|
||||
from .snapshot_provider import SnapshotProvider
|
||||
from .supervisor import ProcessManager, LogManager
|
||||
from .terminado_mini_server import TerminadoMiniServer
|
||||
from .terminal_commands import TerminalCommands
|
||||
|
2
lib/tfw/components/frontend/__init__.py
Normal file
2
lib/tfw/components/frontend/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
from .frontend_handler import FrontendHandler
|
||||
from .message_sender import MessageSender
|
25
lib/tfw/components/frontend/frontend_handler.py
Normal file
25
lib/tfw/components/frontend/frontend_handler.py
Normal file
@ -0,0 +1,25 @@
|
||||
from tfw.internals.networking import Scope
|
||||
|
||||
from .message_storage import FrontendMessageStorage
|
||||
|
||||
|
||||
class FrontendHandler:
    """Forwards frontend-bound messages over the websocket and replays them on demand.

    Every handled message is persisted so that a (re)connecting frontend
    can request a full replay by sending a message with key 'recover'.
    """
    keys = ['message', 'queueMessages', 'dashboard', 'console']

    def __init__(self):
        # Injected by the framework once the connection is established.
        self.server_connector = None
        # Instances additionally react to 'recover' on top of the class-level keys.
        self.keys = [*type(self).keys, 'recover']
        self._frontend_message_storage = FrontendMessageStorage(type(self).keys)

    def send_message(self, message):
        """Push a single message to the frontend over the websocket scope."""
        self.server_connector.send_message(message, scope=Scope.WEBSOCKET)

    def handle_event(self, message, _):
        """Persist the message, replay history when asked to, then forward it."""
        self._frontend_message_storage.save_message(message)
        if message['key'] == 'recover':
            self.recover_frontend()
        self.send_message(message)

    def recover_frontend(self):
        """Re-send every stored message so a fresh frontend regains its state."""
        for stored in self._frontend_message_storage.messages:
            self.send_message(stored)
|
1
lib/tfw/components/fsm/__init__.py
Normal file
1
lib/tfw/components/fsm/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
from .fsm_handler import FSMHandler
|
71
lib/tfw/components/fsm/fsm_handler.py
Normal file
71
lib/tfw/components/fsm/fsm_handler.py
Normal file
@ -0,0 +1,71 @@
|
||||
import logging
|
||||
|
||||
from tfw.internals.crypto import KeyManager, sign_message, verify_message
|
||||
from tfw.internals.networking import Scope
|
||||
|
||||
from .fsm_updater import FSMUpdater
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FSMHandler:
    keys = ['fsm']
    """
    EventHandler responsible for managing the state machine of
    the framework (TFW FSM).

    tfw.networking.TFWServer instances automatically send 'trigger'
    commands to the event handler listening on the 'fsm' key,
    which should be an instance of this event handler.

    This event handler accepts messages that have a
    data['command'] key specifying a command to be executed.

    An 'fsm_update' message is broadcasted after every successful
    command.
    """
    def __init__(self, *, fsm_type, require_signature=False):
        self.fsm = fsm_type()
        self._fsm_updater = FSMUpdater(self.fsm)
        self.auth_key = KeyManager().auth_key
        self._require_signature = require_signature

        # Dispatch table: data['command'] -> handler method.
        self.command_handlers = {
            'trigger': self.handle_trigger,
            'update': self.handle_update,
        }

    def handle_event(self, message, server_connector):
        """Dispatch on data['command'] and broadcast a signed FSM update on success."""
        try:
            handler = self.command_handlers[message['data']['command']]
            message = handler(message)
            if message:
                fsm_update_message = self._fsm_updater.fsm_update
                sign_message(self.auth_key, message)
                sign_message(self.auth_key, fsm_update_message)
                server_connector.send_message(fsm_update_message, Scope.BROADCAST)
        except KeyError:
            # Unknown command or malformed message structure.
            LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

    def handle_trigger(self, message):
        """
        Attempts to step the FSM with the supplied trigger.

        :param message: TFW message with a data field containing
                        the action to try triggering in data['value']
        """
        trigger = message['data']['value']
        if self._require_signature and not verify_message(self.auth_key, message):
            LOG.error('Ignoring unsigned trigger command: %s', message)
            return None
        return message if self.fsm.step(trigger) else None

    def handle_update(self, message):
        """
        Does nothing, but triggers an 'fsm_update' message.
        """
        # pylint: disable=no-self-use
        return message
|
1
lib/tfw/components/ide/__init__.py
Normal file
1
lib/tfw/components/ide/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
from .ide_handler import IdeHandler
|
196
lib/tfw/components/ide/ide_handler.py
Normal file
196
lib/tfw/components/ide/ide_handler.py
Normal file
@ -0,0 +1,196 @@
|
||||
import logging
|
||||
|
||||
from tfw.internals.networking import Scope
|
||||
from tfw.internals.inotify import InotifyObserver
|
||||
|
||||
from .file_manager import FileManager
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
BUILD_ARTIFACTS = (
|
||||
"*.a",
|
||||
"*.class",
|
||||
"*.dll",
|
||||
"*.dylib",
|
||||
"*.elf",
|
||||
"*.exe",
|
||||
"*.jar",
|
||||
"*.ko",
|
||||
"*.la",
|
||||
"*.lib",
|
||||
"*.lo",
|
||||
"*.o",
|
||||
"*.obj",
|
||||
"*.out",
|
||||
"*.py[cod]",
|
||||
"*.so",
|
||||
"*.so.*",
|
||||
"*.tar.gz",
|
||||
"*.zip",
|
||||
"*__pycache__*"
|
||||
)
|
||||
|
||||
|
||||
class IdeHandler:
    keys = ['ide']
    # pylint: disable=too-many-arguments
    """
    Event handler implementing the backend of our browser based IDE.
    By default all files in the directory specified in __init__ are displayed
    on the frontend. Note that this is a stateful component.

    When any file in the selected directory changes they are automatically refreshed
    on the frontend (this is done by listening to inotify events).

    This EventHandler accepts messages that have a data['command'] key specifying
    a command to be executed.

    The API of each command is documented in their respective handler.
    """
    def __init__(self, *, directory, allowed_directories, selected_file=None, exclude=None):
        """
        :param directory: working directory which the EventHandler should serve files from
        :param allowed_directories: list of directories that can be switched to using selectdir
        :param selected_file: file that is selected by default
        :param exclude: list of filenames that should not appear between files (for .o, .pyc, etc.)
        """
        self.server_connector = None  # injected by the framework after construction
        try:
            self.filemanager = FileManager(
                allowed_directories=allowed_directories,
                working_directory=directory,
                selected_file=selected_file,
                exclude=exclude
            )
        except IndexError as err:
            # FileManager indexes into the directory listing, so an empty
            # working directory surfaces as IndexError. Chain the cause
            # so the original traceback is preserved for debugging.
            raise EnvironmentError(
                f'No file(s) in IdeEventHandler working_directory "{directory}"!'
            ) from err

        # Refresh the frontend whenever any watched file changes on disk.
        self.monitor = InotifyObserver(
            self.filemanager.allowed_directories,
            exclude=BUILD_ARTIFACTS
        )
        self.monitor.on_modified = self._reload_frontend
        self.monitor.start()

        # Dispatch table: data['command'] -> handler method.
        self.commands = {
            'read': self.read,
            'write': self.write,
            'select': self.select,
            'selectdir': self.select_dir,
            'exclude': self.exclude
        }

    def _reload_frontend(self, event):  # pylint: disable=unused-argument
        """Ask the frontend to re-fetch the current file after a filesystem change."""
        self.send_message({
            'key': 'ide',
            'data': {'command': 'reload'}
        })

    def send_message(self, message):
        """Send a message to the frontend over the websocket scope."""
        self.server_connector.send_message(message, scope=Scope.WEBSOCKET)

    def read(self, data):
        """
        Read the currently selected file.

        :return dict: TFW message data containing key 'content'
                      (contents of the selected file)
        """
        try:
            data['content'] = self.filemanager.file_contents
        except PermissionError:
            data['content'] = 'You have no permission to open that file :('
        except FileNotFoundError:
            data['content'] = 'This file was removed :('
        except Exception:  # pylint: disable=broad-except
            # Best-effort: the frontend must always get *some* content back.
            data['content'] = 'Failed to read file :('
        return data

    def write(self, data):
        """
        Overwrites a file with the desired string.

        :param data: TFW message data containing key 'content'
                     (new file content)
        """
        try:
            self.filemanager.file_contents = data['content']
        except Exception:  # pylint: disable=broad-except
            LOG.exception('Error writing file!')
        # The content was consumed (or failed); don't echo it back.
        del data['content']
        return data

    def select(self, data):
        """
        Selects a file from the current directory.

        :param data: TFW message data containing 'filename'
                     (name of file to select relative to the current directory)
        """
        try:
            self.filemanager.filename = data['filename']
        except EnvironmentError:
            LOG.exception('Failed to select file "%s"', data['filename'])
        return data

    def select_dir(self, data):
        """
        Select a new working directory to display files from.

        :param data: TFW message data containing 'directory'
                     (absolute path of directory to select.
                     must be a path whitelisted in
                     self.allowed_directories)
        """
        try:
            self.filemanager.workdir = data['directory']
            try:
                # Auto-select the first file so the editor is never empty.
                self.filemanager.filename = self.filemanager.files[0]
                self.read(data)
            except IndexError:
                data['content'] = 'No files in this directory :('
        except EnvironmentError as err:
            LOG.error(
                'Failed to select directory "%s". Reason: %s',
                data['directory'], str(err)
            )
        return data

    def exclude(self, data):
        """
        Overwrite list of excluded files

        :param data: TFW message data containing 'exclude'
                     (list of unix-style filename patterns to be excluded,
                     e.g.: ["*.pyc", "*.o"])
        """
        try:
            self.filemanager.exclude = list(data['exclude'])
        except TypeError:
            LOG.error('Exclude must be Iterable!')
        return data

    def attach_fileinfo(self, data):
        """
        Basic information included in every response to the frontend.
        """
        data['filename'] = self.filemanager.filename
        data['files'] = self.filemanager.files
        data['directory'] = self.filemanager.workdir

    def handle_event(self, message, _):
        """Dispatch on data['command'], attach file info and reply to the frontend."""
        try:
            data = message['data']
            message['data'] = self.commands[data['command']](data)
            self.attach_fileinfo(data)
            self.send_message(message)
        except KeyError:
            LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

    def cleanup(self):
        """Stop the inotify monitor thread."""
        self.monitor.stop()
|
@ -1,6 +0,0 @@
|
||||
from .inotify import InotifyObserver
|
||||
from .inotify import (
|
||||
InotifyFileCreatedEvent, InotifyFileModifiedEvent, InotifyFileMovedEvent,
|
||||
InotifyFileDeletedEvent, InotifyDirCreatedEvent, InotifyDirModifiedEvent,
|
||||
InotifyDirMovedEvent, InotifyDirDeletedEvent
|
||||
)
|
@ -1,189 +0,0 @@
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
from typing import Iterable
|
||||
from time import time
|
||||
from os.path import abspath, dirname, isdir
|
||||
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemMovedEvent, PatternMatchingEventHandler
|
||||
from watchdog.events import (
|
||||
FileCreatedEvent, FileModifiedEvent, FileMovedEvent, FileDeletedEvent,
|
||||
DirCreatedEvent, DirModifiedEvent, DirMovedEvent, DirDeletedEvent
|
||||
)
|
||||
|
||||
|
||||
class InotifyEvent:
    """Base class for inotify-style filesystem events.

    Records the affected path and the wall-clock time the event object
    was created.
    """

    def __init__(self, src_path):
        self.date = time()  # creation timestamp (seconds since epoch)
        self.src_path = src_path

    def __str__(self):
        return repr(self)

    def __repr__(self):
        return f'{type(self).__name__}({self.src_path})'
|
||||
|
||||
|
||||
class InotifyMovedEvent(InotifyEvent):
    """Event for moves/renames; also carries the destination path."""

    def __init__(self, src_path, dest_path):
        self.dest_path = dest_path
        super().__init__(src_path)

    def __repr__(self):
        return f'{type(self).__name__}({self.src_path}, {self.dest_path})'
|
||||
|
||||
|
||||
class InotifyFileCreatedEvent(InotifyEvent):
    """A file was created."""


class InotifyFileModifiedEvent(InotifyEvent):
    """A file was modified."""


class InotifyFileMovedEvent(InotifyMovedEvent):
    """A file was moved or renamed."""


class InotifyFileDeletedEvent(InotifyEvent):
    """A file was deleted."""


class InotifyDirCreatedEvent(InotifyEvent):
    """A directory was created."""


class InotifyDirModifiedEvent(InotifyEvent):
    """A directory's contents changed."""


class InotifyDirMovedEvent(InotifyMovedEvent):
    """A directory was moved or renamed."""


class InotifyDirDeletedEvent(InotifyEvent):
    """A directory was deleted."""
|
||||
|
||||
|
||||
class InotifyObserver:
    """Watchdog-backed observer exposing inotify-style event callbacks.

    Accepts a single path or an iterable of paths. Paths that are files
    are watched via patterns on their parent directory; directories are
    watched directly. Subclasses override the on_* hooks.
    """

    def __init__(self, path, patterns=None, exclude=None, recursive=False):
        self._files = []
        self._paths = path
        self._patterns = patterns or []
        self._exclude = exclude
        self._recursive = recursive
        self._observer = Observer()
        self._reset()

    def _reset(self):
        """Recompute watched files/directories and reschedule the observer."""
        if isinstance(self._paths, str):
            self._paths = [self._paths]
        if not isinstance(self._paths, Iterable):
            raise ValueError('Expected one or more string paths.')
        self._extract_files_from_paths()

        combined_patterns = self._files + self.patterns
        handler = PatternMatchingEventHandler(combined_patterns or None, self.exclude)
        handler.on_any_event = self._dispatch_event
        self._observer.unschedule_all()
        for watched_dir in self.paths:
            self._observer.schedule(handler, watched_dir, self._recursive)

    def _extract_files_from_paths(self):
        """Split self._paths into directories to schedule and file patterns."""
        file_list, dir_list = [], []
        for raw_path in self._paths:
            resolved = abspath(raw_path)
            if isdir(resolved):
                dir_list.append(resolved)
            else:
                # Watch the parent directory and filter for the file itself.
                dir_list.append(dirname(resolved))
                file_list.append(resolved)
        self._files, self._paths = file_list, dir_list

    @property
    def paths(self):
        """Directories currently being watched."""
        return self._paths

    @paths.setter
    def paths(self, paths):
        self._paths = paths
        self._reset()

    @property
    def patterns(self):
        """Filename patterns that events must match (empty list = match all)."""
        return self._patterns

    @patterns.setter
    def patterns(self, patterns):
        self._patterns = patterns or []
        self._reset()

    @property
    def exclude(self):
        """Filename patterns to ignore."""
        return self._exclude

    @exclude.setter
    def exclude(self, exclude):
        self._exclude = exclude
        self._reset()

    def start(self):
        """Start the background observer thread."""
        self._observer.start()

    def stop(self):
        """Stop the observer thread and wait for it to finish."""
        self._observer.stop()
        self._observer.join()

    def _dispatch_event(self, event):
        """Translate a watchdog event and fan it out to the matching hook."""
        actions = {
            InotifyFileCreatedEvent: self.on_created,
            InotifyFileModifiedEvent: self.on_modified,
            InotifyFileMovedEvent: self.on_moved,
            InotifyFileDeletedEvent: self.on_deleted,
            InotifyDirCreatedEvent: self.on_created,
            InotifyDirModifiedEvent: self.on_modified,
            InotifyDirMovedEvent: self.on_moved,
            InotifyDirDeletedEvent: self.on_deleted
        }

        translated = self._transform_event(event)
        self.on_any_event(translated)
        actions[type(translated)](translated)

    @staticmethod
    def _transform_event(event):
        """Convert a raw watchdog event into the corresponding Inotify* event."""
        watchdog_to_inotify = {
            FileCreatedEvent: InotifyFileCreatedEvent,
            FileModifiedEvent: InotifyFileModifiedEvent,
            FileMovedEvent: InotifyFileMovedEvent,
            FileDeletedEvent: InotifyFileDeletedEvent,
            DirCreatedEvent: InotifyDirCreatedEvent,
            DirModifiedEvent: InotifyDirModifiedEvent,
            DirMovedEvent: InotifyDirMovedEvent,
            DirDeletedEvent: InotifyDirDeletedEvent
        }

        try:
            cls = watchdog_to_inotify[type(event)]
        except KeyError:
            raise NameError('Watchdog API returned an unknown event.')

        if isinstance(event, FileSystemMovedEvent):
            return cls(event.src_path, event.dest_path)
        return cls(event.src_path)

    def on_any_event(self, event):
        """Hook invoked for every event; default is a no-op."""

    def on_created(self, event):
        """Hook invoked on create events; default is a no-op."""

    def on_modified(self, event):
        """Hook invoked on modify events; default is a no-op."""

    def on_moved(self, event):
        """Hook invoked on move events; default is a no-op."""

    def on_deleted(self, event):
        """Hook invoked on delete events; default is a no-op."""
|
@ -1,179 +0,0 @@
|
||||
# pylint: disable=redefined-outer-name
|
||||
|
||||
from queue import Empty, Queue
|
||||
from secrets import token_urlsafe
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
from os.path import join
|
||||
from os import mkdir, remove, rename
|
||||
from tempfile import TemporaryDirectory
|
||||
from contextlib import suppress
|
||||
|
||||
import watchdog
|
||||
import pytest
|
||||
|
||||
from .inotify import InotifyObserver
|
||||
from .inotify import (
|
||||
InotifyFileCreatedEvent, InotifyFileModifiedEvent, InotifyFileMovedEvent,
|
||||
InotifyFileDeletedEvent, InotifyDirCreatedEvent, InotifyDirModifiedEvent,
|
||||
InotifyDirMovedEvent, InotifyDirDeletedEvent
|
||||
)
|
||||
|
||||
with suppress(AttributeError):
|
||||
watchdog.observers.inotify_buffer.InotifyBuffer.delay = 0
|
||||
|
||||
|
||||
class InotifyContext:
    """Test helper bundling a temp directory tree with an observer and its queues."""

    def __init__(self, workdir, subdir, subfile, observer):
        # Number of events consumed via check_event / manual draining.
        self.missing_events = 0
        self.workdir = workdir
        self.subdir = subdir
        self.subfile = subfile
        self.observer = observer

        self.event_to_queue = {
            InotifyFileCreatedEvent: self.observer.create_queue,
            InotifyFileModifiedEvent: self.observer.modify_queue,
            InotifyFileMovedEvent: self.observer.move_queue,
            InotifyFileDeletedEvent: self.observer.delete_queue,
            InotifyDirCreatedEvent: self.observer.create_queue,
            InotifyDirModifiedEvent: self.observer.modify_queue,
            InotifyDirMovedEvent: self.observer.move_queue,
            InotifyDirDeletedEvent: self.observer.delete_queue
        }

    def create_random_file(self, dirname, extension):
        """Create an empty, randomly named file under dirname; return its path."""
        filepath = self.join(f'{dirname}/{generate_name()}{extension}')
        Path(filepath).touch()
        return filepath

    def create_random_folder(self, basepath):
        """Create a randomly named subdirectory under basepath; return its path."""
        dirpath = self.join(f'{basepath}/{generate_name()}')
        mkdir(dirpath)
        return dirpath

    def join(self, path):
        """Resolve path relative to the context's working directory."""
        return join(self.workdir, path)

    def check_event(self, event_type, path):
        """Pop the next event of event_type and assert it refers to path."""
        self.missing_events += 1
        event = self.event_to_queue[event_type].get(timeout=0.1)
        assert isinstance(event, event_type)
        assert event.src_path == path
        return event

    def check_empty(self, event_type):
        """Assert that no event of event_type is pending."""
        with pytest.raises(Empty):
            self.event_to_queue[event_type].get(timeout=0.1)

    def check_any(self):
        """True iff every observed event was either still queued or checked."""
        attrs = self.observer.__dict__.values()
        queued = sum(q.qsize() for q in attrs if isinstance(q, Queue))
        return queued + self.missing_events == len(self.observer.any_list)
|
||||
|
||||
|
||||
class InotifyTestObserver(InotifyObserver):
    """InotifyObserver that records every callback into per-kind queues."""

    def __init__(self, paths, patterns=None, exclude=None, recursive=False):
        self.any_list = []
        self.create_queue = Queue()
        self.modify_queue = Queue()
        self.move_queue = Queue()
        self.delete_queue = Queue()
        super().__init__(paths, patterns, exclude, recursive)

    def on_any_event(self, event):
        self.any_list.append(event)

    def on_created(self, event):
        self.create_queue.put(event)

    def on_modified(self, event):
        self.modify_queue.put(event)

    def on_moved(self, event):
        self.move_queue.put(event)

    def on_deleted(self, event):
        self.delete_queue.put(event)
|
||||
|
||||
def generate_name():
    """Return a random, URL-safe identifier built from 16 bytes of entropy."""
    return token_urlsafe(16)
|
||||
|
||||
@pytest.fixture()
def context():
    """Yield an InotifyContext over a temp tree watched by a started observer."""
    with TemporaryDirectory() as workdir:
        subdir = join(workdir, generate_name())
        subfile = join(subdir, generate_name() + '.txt')
        mkdir(subdir)
        Path(subfile).touch()
        watcher = InotifyTestObserver(workdir, recursive=True)
        watcher.start()
        yield InotifyContext(workdir, subdir, subfile, watcher)
|
||||
|
||||
def test_create(context):
    """File and directory creation must raise the matching created events."""
    created_file = context.create_random_file(context.workdir, '.txt')
    context.check_event(InotifyFileCreatedEvent, created_file)
    created_dir = context.create_random_folder(context.workdir)
    context.check_event(InotifyDirCreatedEvent, created_dir)
    assert context.check_any()
|
||||
|
||||
def test_modify(context):
    """Writing raises modify events; renaming modifies the parent directory."""
    with open(context.subfile, 'wb', buffering=0) as stream:
        stream.write(b'text')
        context.check_event(InotifyFileModifiedEvent, context.subfile)
        # Drain any extra modify events the write may have produced.
        while True:
            try:
                context.observer.modify_queue.get(timeout=0.1)
                context.missing_events += 1
            except Empty:
                break
    rename(context.subfile, context.subfile + '_new')
    context.check_event(InotifyDirModifiedEvent, context.subdir)
    assert context.check_any()
|
||||
|
||||
def test_move(context):
    """Renaming a directory moves the directory and the file inside it."""
    rename(context.subdir, context.subdir + '_new')
    context.check_event(InotifyDirMovedEvent, context.subdir)
    context.check_event(InotifyFileMovedEvent, context.subfile)
    assert context.check_any()
|
||||
|
||||
def test_delete(context):
    """Removing a tree deletes the contained file before the directory."""
    rmtree(context.subdir)
    context.check_event(InotifyFileDeletedEvent, context.subfile)
    context.check_event(InotifyDirDeletedEvent, context.subdir)
    assert context.check_any()
|
||||
|
||||
def test_paths(context):
    """Reassigning observer.paths narrows and widens the watched set."""
    context.observer.paths = context.subdir
    unwatched_dir = context.create_random_folder(context.workdir)
    watched_file = context.create_random_file(context.subdir, '.txt')
    context.check_event(InotifyDirModifiedEvent, context.subdir)
    context.check_event(InotifyFileCreatedEvent, watched_file)
    context.observer.paths = [unwatched_dir, watched_file]
    remove(watched_file)
    context.check_event(InotifyFileDeletedEvent, watched_file)
    assert context.check_any()
    context.observer.paths = context.workdir
|
||||
|
||||
def test_patterns(context):
    """Only files matching observer.patterns produce events."""
    context.observer.patterns = ['*.txt']
    context.create_random_file(context.subdir, '.bin')
    matching_file = context.create_random_file(context.subdir, '.txt')
    context.check_event(InotifyFileCreatedEvent, matching_file)
    context.check_empty(InotifyFileCreatedEvent)
    assert context.check_any()
    context.observer.patterns = None
|
||||
|
||||
def test_exclude(context):
    """Files matching observer.exclude produce no events."""
    context.observer.exclude = ['*.txt']
    context.create_random_file(context.subdir, '.txt')
    visible_file = context.create_random_file(context.subdir, '.bin')
    context.check_event(InotifyFileCreatedEvent, visible_file)
    context.check_empty(InotifyFileCreatedEvent)
    assert context.check_any()
    context.observer.exclude = None
|
||||
|
||||
def test_stress(context):
    """A burst of 1024 file creations must all be observed, in order."""
    created = [
        context.create_random_file(context.subdir, '.txt')
        for _ in range(1024)
    ]
    for filepath in created:
        context.check_event(InotifyFileCreatedEvent, filepath)
    assert context.check_any()
|
1
lib/tfw/components/pipe_io/__init__.py
Normal file
1
lib/tfw/components/pipe_io/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
from .pipe_io_handler import PipeIOHandler, PipeIOHandlerBase, TransformerPipeIOHandler, CommandHandler
|
143
lib/tfw/components/pipe_io/pipe_io_handler.py
Normal file
143
lib/tfw/components/pipe_io/pipe_io_handler.py
Normal file
@ -0,0 +1,143 @@
|
||||
import logging
|
||||
from abc import abstractmethod
|
||||
from json import loads, dumps
|
||||
from subprocess import run, PIPE, Popen
|
||||
from functools import partial
|
||||
from os import getpgid, killpg
|
||||
from os.path import join
|
||||
from signal import SIGTERM
|
||||
from secrets import token_urlsafe
|
||||
from threading import Thread
|
||||
from contextlib import suppress
|
||||
|
||||
from .pipe_io_server import PipeIOServer, terminate_process_on_failure
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
DEFAULT_PERMISSIONS = 0o600
|
||||
|
||||
|
||||
class PipeIOHandlerBase:
    """Base for event handlers bridging TFW messages with a pair of named pipes."""
    keys = ['']

    def __init__(self, in_pipe_path, out_pipe_path, permissions=DEFAULT_PERMISSIONS):
        self.server_connector = None  # injected by the framework after construction
        self.pipe_io = CallbackPipeIOServer(
            in_pipe_path,
            out_pipe_path,
            self.handle_pipe_event,
            permissions
        )
        self.pipe_io.start()

    @abstractmethod
    def handle_pipe_event(self, message_bytes):
        """Process raw bytes read from the input pipe; implemented by subclasses."""
        raise NotImplementedError()

    def cleanup(self):
        """Shut down the pipe server."""
        self.pipe_io.stop()
|
||||
|
||||
|
||||
class CallbackPipeIOServer(PipeIOServer):
    """PipeIOServer that delegates each incoming message to a callback."""

    def __init__(self, in_pipe_path, out_pipe_path, callback, permissions):
        super().__init__(in_pipe_path, out_pipe_path, permissions)
        self.callback = callback

    def handle_message(self, message):
        try:
            self.callback(message)
        except:  # pylint: disable=bare-except
            # Deliberate catch-all: a failing callback must not kill the server.
            LOG.exception('Failed to handle message %s from pipe %s!', message, self.in_pipe)
|
||||
|
||||
|
||||
class PipeIOHandler(PipeIOHandlerBase):
    """Forwards TFW messages to the pipe as JSON and pipe JSON back as messages."""

    def handle_event(self, message, _):
        """Serialize the message and write it to the output pipe."""
        self.pipe_io.send_message(dumps(message).encode())

    def handle_pipe_event(self, message_bytes):
        """Parse JSON from the input pipe and forward it to the server."""
        self.server_connector.send_message(loads(message_bytes))
|
||||
|
||||
|
||||
class TransformerPipeIOHandler(PipeIOHandlerBase):
    """Pipe IO handler that filters messages through external transform commands.

    Outgoing messages are piped through transform_out_cmd, incoming pipe
    data through transform_in_cmd. A transform producing empty output
    drops the message.
    """
    # pylint: disable=too-many-arguments
    def __init__(
            self, in_pipe_path, out_pipe_path,
            transform_in_cmd, transform_out_cmd,
            permissions=DEFAULT_PERMISSIONS
    ):
        self._transform_in = partial(self._transform_message, transform_in_cmd)
        self._transform_out = partial(self._transform_message, transform_out_cmd)
        super().__init__(in_pipe_path, out_pipe_path, permissions)

    @staticmethod
    def _transform_message(transform_cmd, message):
        """Pipe message through transform_cmd (run in a shell); return its stdout.

        :raises ValueError: if the command exits non-zero. The exit code and
            captured stderr are included instead of being silently discarded.
        """
        proc = run(
            transform_cmd,
            input=message,
            stdout=PIPE,
            stderr=PIPE,
            shell=True
        )
        if proc.returncode == 0:
            return proc.stdout
        raise ValueError(
            f'Transforming message {message} failed with exit code {proc.returncode}! '
            f'Stderr:\n{proc.stderr.decode(errors="replace")}'
        )

    def handle_event(self, message, _):
        """Serialize, transform and (if non-empty) write the message to the pipe."""
        json_bytes = dumps(message).encode()
        transformed_bytes = self._transform_out(json_bytes)
        if transformed_bytes:
            self.pipe_io.send_message(transformed_bytes)

    def handle_pipe_event(self, message_bytes):
        """Transform pipe input and (if non-empty) forward it as a parsed message."""
        transformed_bytes = self._transform_in(message_bytes)
        if transformed_bytes:
            json_message = loads(transformed_bytes)
            self.server_connector.send_message(json_message)
|
||||
|
||||
|
||||
class CommandHandler(PipeIOHandler):
    """Runs a shell command wired to the pipe pair and monitors it for failure."""

    def __init__(self, command, permissions=DEFAULT_PERMISSIONS):
        super().__init__(
            self._generate_tempfilename(),
            self._generate_tempfilename(),
            permissions
        )

        # The subprocess reads what we write (out_pipe) and we read what it writes (in_pipe).
        self._proc_stdin = open(self.pipe_io.out_pipe, 'rb')
        self._proc_stdout = open(self.pipe_io.in_pipe, 'wb')
        self._proc = Popen(
            command, shell=True, executable='/bin/bash',
            stdin=self._proc_stdin, stdout=self._proc_stdout, stderr=PIPE,
            start_new_session=True  # own process group, so cleanup can killpg()
        )
        self._monitor_proc_thread = self._start_monitor_proc()

    def _generate_tempfilename(self):
        """Return a unique pipe path under /tmp, namespaced by class name."""
        # pylint: disable=no-self-use
        random_suffix = token_urlsafe(10)
        return join('/tmp', f'{type(self).__name__}.{random_suffix}')

    def _start_monitor_proc(self):
        """Spawn a daemon thread that watches the subprocess for failure."""
        watcher = Thread(target=self._monitor_proc, daemon=True)
        watcher.start()
        return watcher

    @terminate_process_on_failure
    def _monitor_proc(self):
        """Wait for the subprocess and raise if it died unexpectedly."""
        return_code = self._proc.wait()
        if return_code == -int(SIGTERM):
            # supervisord asked the program to terminate, this is fine
            return
        if return_code != 0:
            _, stderr = self._proc.communicate()
            raise RuntimeError(f'Subprocess failed ({return_code})! Stderr:\n{stderr.decode()}')

    def cleanup(self):
        """Terminate the subprocess group, close its pipes and stop the server."""
        with suppress(ProcessLookupError):
            killpg(getpgid(self._proc.pid), SIGTERM)
        self._proc_stdin.close()
        self._proc_stdout.close()
        super().cleanup()
|
2
lib/tfw/components/process_management/__init__.py
Normal file
2
lib/tfw/components/process_management/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
from .process_handler import ProcessHandler
|
||||
from .process_log_handler import ProcessLogHandler
|
@ -1,19 +1,19 @@
|
||||
import logging
|
||||
|
||||
from tfw.networking import Scope
|
||||
from tfw.internals.networking import Scope
|
||||
from tfw.internals.inotify import InotifyObserver
|
||||
|
||||
from .inotify import InotifyObserver
|
||||
from .supervisor import LogManager
|
||||
from .supervisor import ProcessLogManager
|
||||
|
||||
|
||||
class LogInotifyObserver(InotifyObserver, LogManager):
|
||||
class LogInotifyObserver(InotifyObserver, ProcessLogManager):
|
||||
def __init__(self, server_connector, supervisor_uri, process_name, log_tail=0):
|
||||
self._prevent_log_recursion()
|
||||
self._server_connector = server_connector
|
||||
self._process_name = process_name
|
||||
self.log_tail = log_tail
|
||||
self._procinfo = None
|
||||
LogManager.__init__(self, supervisor_uri)
|
||||
ProcessLogManager.__init__(self, supervisor_uri)
|
||||
InotifyObserver.__init__(self, self._get_logfiles())
|
||||
|
||||
@staticmethod
|
54
lib/tfw/components/process_management/process_handler.py
Normal file
54
lib/tfw/components/process_management/process_handler.py
Normal file
@ -0,0 +1,54 @@
|
||||
import logging
|
||||
from xmlrpc.client import Fault as SupervisorFault
|
||||
|
||||
from tfw.internals.networking import Scope
|
||||
|
||||
from .supervisor import ProcessManager, ProcessLogManager
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProcessHandler(ProcessManager, ProcessLogManager):
    keys = ['processmanager']
    """
    Event handler that can manage processes managed by supervisor.

    This EventHandler accepts messages that have a data['command'] key specifying
    a command to be executed.
    Every message must contain a data['process_name'] field with the name of the
    process to manage. This is the name specified in supervisor config files like so:
    [program:someprogram]

    Commands available: start, stop, restart, readlog
    (the names are as self-documenting as it gets)
    """
    def __init__(self, *, supervisor_uri, log_tail=0):
        ProcessManager.__init__(self, supervisor_uri)
        ProcessLogManager.__init__(self, supervisor_uri)
        self.log_tail = log_tail
        # Dispatch table: data['command'] -> supervisor action.
        self.commands = {
            'start': self.start_process,
            'stop': self.stop_process,
            'restart': self.restart_process
        }

    def handle_event(self, message, server_connector):
        """Run the requested supervisor command and reply with logs attached."""
        try:
            data = message['data']
            try:
                self.commands[data['command']](data['process_name'])
            except SupervisorFault as fault:
                # Report supervisor-side failures to the frontend instead of crashing.
                message['data']['error'] = fault.faultString
            finally:
                # Always attach the latest process output to the reply.
                message['data']['stdout'] = self.read_stdout(
                    data['process_name'],
                    self.log_tail
                )
                message['data']['stderr'] = self.read_stderr(
                    data['process_name'],
                    self.log_tail
                )
                server_connector.send_message(message, scope=Scope.WEBSOCKET)
        except KeyError:
            LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)
|
69
lib/tfw/components/process_management/process_log_handler.py
Normal file
69
lib/tfw/components/process_management/process_log_handler.py
Normal file
@ -0,0 +1,69 @@
|
||||
import logging
|
||||
|
||||
from .log_inotify_observer import LogInotifyObserver
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProcessLogHandler:
    """
    Monitors the output of a supervisor process (stdout, stderr) and
    sends the results to the frontend.

    Accepts messages that have a data['command'] key specifying
    a command to be executed.

    The API of each command is documented in their respective handler.
    """
    keys = ['logmonitor']

    def __init__(self, *, process_name, supervisor_uri, log_tail=0):
        """
        :param process_name: supervisor name of the process to monitor
        :param supervisor_uri: URI of the supervisor XML-RPC server
        :param log_tail: initial number of trailing characters to send
        """
        self.server_connector = None
        self.process_name = process_name
        self._supervisor_uri = supervisor_uri
        self._initial_log_tail = log_tail
        self._monitor = None

        # Dispatch table mapping command names to their handler methods.
        self.command_handlers = {
            'process_name': self.handle_process_name,
            'log_tail': self.handle_log_tail
        }

    def start(self):
        """Create and start the inotify-based log monitor."""
        self._monitor = LogInotifyObserver(
            server_connector=self.server_connector,
            supervisor_uri=self._supervisor_uri,
            process_name=self.process_name,
            log_tail=self._initial_log_tail
        )
        self._monitor.start()

    def handle_event(self, message, _):
        """Dispatch a message to the matching command handler; log and ignore malformed ones."""
        try:
            data = message['data']
            self.command_handlers[data['command']](data)
        except KeyError:
            LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

    def handle_process_name(self, data):
        """
        Changes the monitored process.

        :param data: TFW message data containing 'value'
                     (name of the process to monitor)
        """
        self._monitor.process_name = data['value']

    def handle_log_tail(self, data):
        """
        Sets tail length of the log the monitor will send
        to the frontend (the monitor will send back the last
        'value' characters of the log).

        :param data: TFW message data containing 'value'
                     (new tail length)
        """
        self._monitor.log_tail = data['value']

    def cleanup(self):
        """Stop the monitor if it was ever started (safe to call before start())."""
        if self._monitor is not None:
            self._monitor.stop()
|
@ -22,7 +22,7 @@ class ProcessManager(SupervisorBase):
|
||||
self.start_process(process_name)
|
||||
|
||||
|
||||
class LogManager(SupervisorBase):
|
||||
class ProcessLogManager(SupervisorBase):
|
||||
def read_stdout(self, process_name, tail=0):
|
||||
return self.supervisor.readProcessStdoutLog(process_name, -tail, 0)
|
||||
|
1
lib/tfw/components/snapshots/__init__.py
Normal file
1
lib/tfw/components/snapshots/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
from .snapshot_handler import SnapshotHandler
|
86
lib/tfw/components/snapshots/snapshot_handler.py
Normal file
86
lib/tfw/components/snapshots/snapshot_handler.py
Normal file
@ -0,0 +1,86 @@
|
||||
import logging
|
||||
from os.path import join as joinpath
|
||||
from os.path import basename
|
||||
from os import makedirs
|
||||
from datetime import datetime
|
||||
|
||||
from dateutil import parser as dateparser
|
||||
|
||||
from tfw.internals.networking import Scope
|
||||
|
||||
from .snapshot_provider import SnapshotProvider
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SnapshotHandler:
    """
    Event handler that takes and restores snapshots of a set of
    directories, keeping one git-backed SnapshotProvider per directory.
    """
    keys = ['snapshot']

    def __init__(self, *, directories, snapshots_dir, exclude_unix_patterns=None):
        """
        :param directories: iterable of directories to snapshot
        :param snapshots_dir: parent directory holding the backing git dirs
        :param exclude_unix_patterns: optional list of unix glob patterns to exclude
        """
        self._snapshots_dir = snapshots_dir
        self.snapshot_providers = {}
        self._exclude_unix_patterns = exclude_unix_patterns
        self.init_snapshot_providers(directories)

        # Maps command names to their handler methods.
        self.command_handlers = {
            'take_snapshot': self.handle_take_snapshot,
            'restore_snapshot': self.handle_restore_snapshot,
            'exclude': self.handle_exclude
        }

    def init_snapshot_providers(self, directories):
        """Create one SnapshotProvider per target directory."""
        for idx, target in enumerate(directories):
            repo_path = self.init_git_dir(idx, target)
            self.snapshot_providers[target] = SnapshotProvider(
                target,
                repo_path,
                self._exclude_unix_patterns
            )

    def init_git_dir(self, index, directory):
        """Ensure the backing git directory for a target exists and return its path."""
        repo_path = joinpath(self._snapshots_dir, f'{basename(directory)}-{index}')
        makedirs(repo_path, exist_ok=True)
        return repo_path

    def handle_event(self, message, server_connector):
        """Dispatch to a command handler and reply; log and ignore malformed messages."""
        try:
            data = message['data']
            handler = self.command_handlers[data['command']]
            message['data'] = handler(data)
            server_connector.send_message(message, scope=Scope.WEBSOCKET)
        except KeyError:
            LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

    def handle_take_snapshot(self, data):
        """Take a snapshot of every managed directory."""
        LOG.debug('Taking snapshots of directories %s', self.snapshot_providers.keys())
        for provider in self.snapshot_providers.values():
            provider.take_snapshot()
        return data

    def handle_restore_snapshot(self, data):
        """Restore every managed directory to the snapshot at data['value'] (default: now)."""
        target_date = dateparser.parse(
            data.get('value', datetime.now().isoformat())
        )
        LOG.debug(
            'Restoring snapshots (@ %s) of directories %s',
            target_date,
            self.snapshot_providers.keys()
        )
        for provider in self.snapshot_providers.values():
            provider.restore_snapshot(target_date)
        return data

    def handle_exclude(self, data):
        """Set the exclusion pattern list (data['value'], must be a list) on every provider."""
        patterns = data['value']
        if not isinstance(patterns, list):
            # Surface malformed input as KeyError so handle_event logs and ignores it.
            raise KeyError

        for provider in self.snapshot_providers.values():
            provider.exclude = patterns
        return data
|
3
lib/tfw/components/terminal/__init__.py
Normal file
3
lib/tfw/components/terminal/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
from .terminal_handler import TerminalHandler
|
||||
from .terminal_commands_handler import TerminalCommandsHandler
|
||||
from .commands_equal import CommandsEqual
|
@ -1,7 +1,7 @@
|
||||
from shlex import split
|
||||
from re import search
|
||||
|
||||
from tfw.decorators.lazy_property import lazy_property
|
||||
from tfw.internals.lazy import lazy_property
|
||||
|
||||
|
||||
class CommandsEqual:
|
@ -2,7 +2,7 @@ from re import findall
|
||||
from re import compile as compileregex
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from tfw.components.inotify import InotifyObserver
|
||||
from tfw.internals.inotify import InotifyObserver
|
||||
|
||||
|
||||
class HistoryMonitor(ABC, InotifyObserver):
|
9
lib/tfw/components/terminal/terminal_commands_handler.py
Normal file
9
lib/tfw/components/terminal/terminal_commands_handler.py
Normal file
@ -0,0 +1,9 @@
|
||||
from .terminal_commands import TerminalCommands
|
||||
|
||||
|
||||
class TerminalCommandsHandler(TerminalCommands):
    """Forwards commands from bash history events to the TerminalCommands callback."""
    keys = ['history.bash']

    def handle_event(self, message, _):
        # The message value is the command the user executed in the shell.
        self.callback(message['value'])
|
86
lib/tfw/components/terminal/terminal_handler.py
Normal file
86
lib/tfw/components/terminal/terminal_handler.py
Normal file
@ -0,0 +1,86 @@
|
||||
import logging
|
||||
|
||||
from .history_monitor import BashMonitor
|
||||
from .terminado_mini_server import TerminadoMiniServer
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TerminalHandler:
    """
    Event handler responsible for managing terminal sessions for frontend xterm
    sessions to connect to. You need to instantiate this in order for frontend
    terminals to work.

    This EventHandler accepts messages that have a data['command'] key specifying
    a command to be executed.
    The API of each command is documented in their respective handler.
    """
    keys = ['shell']

    def __init__(self, *, port, user, workind_directory, histfile):
        """
        :param port: port the terminado server will listen on
        :param user: system user the shell is run as (via sudo)
        :param workind_directory: working directory of the shell
                                  (parameter name kept for backward compatibility)
        :param histfile: path of the bash history file to monitor
        """
        self.server_connector = None
        self._histfile = histfile
        # Created lazily in start(); None until then.
        self._historymonitor = None
        bash_as_user_cmd = ['sudo', '-u', user, 'bash']

        self.terminado_server = TerminadoMiniServer(
            '/terminal',
            port,
            workind_directory,
            bash_as_user_cmd
        )

        # Dispatch table mapping command names to their handler methods.
        self.commands = {
            'write': self.write,
            'read': self.read
        }

        self.terminado_server.listen()

    def start(self):
        """Start monitoring the bash history file."""
        self._historymonitor = BashMonitor(self.server_connector, self._histfile)
        self._historymonitor.start()

    @property
    def historymonitor(self):
        # None until start() has been called.
        return self._historymonitor

    def handle_event(self, message, _):
        """Dispatch to a command handler; log and ignore malformed messages."""
        try:
            data = message['data']
            message['data'] = self.commands[data['command']](data)
        except KeyError:
            LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

    def write(self, data):
        """
        Writes a string to the terminal session (on the pty level).
        Useful for pre-typing and executing commands for the user.

        :param data: TFW message data containing 'value'
                     (command to be written to the pty)
        """
        self.terminado_server.pty.write(data['value'])
        return data

    def read(self, data):
        """
        Reads the history of commands executed.

        :param data: TFW message data containing 'count'
                     (the number of history elements to return)
        :return dict: message with list of commands in data['history']
        """
        data['count'] = int(data.get('count', 1))
        if self.historymonitor:
            data['history'] = self.historymonitor.history[-data['count']:]
        return data

    def cleanup(self):
        """Stop the terminado server and, if started, the history monitor."""
        self.terminado_server.stop()
        if self._historymonitor is not None:
            self._historymonitor.stop()
|
Reference in New Issue
Block a user