Mirror of https://github.com/avatao-content/baseimage-tutorial-framework
Synced 2024-11-15 01:47:16 +00:00

Commit 81790e18e1
@@ -4,12 +4,25 @@ from .message_storage import FrontendMessageStorage


 class FrontendHandler:
-    keys = ['message', 'queueMessages', 'dashboard', 'console']
+    # keys = [
+    #     'console.read',
+    #     'console.write',
+    #     'console.showLiveLogs',
+    #     'console.rewriteContentWithProcessLogsOnDeploy',
+    #     'dashboard.layout',
+    #     'dashboard.hideMessages',
+    #     'dashboard.terminalMenuItem',
+    #     'dashboard.reloadFrontend',
+    #     'dashboard.reloadIframe',
+    #     'message.config',
+    #     'message.queue',
+    #     'message.send'
+    # ]
+    keys = ['console', 'dashboard', 'message', 'ide.read', 'recover']

     def __init__(self):
         self.connector = None
-        self.keys = [*type(self).keys, 'recover']
-        self._frontend_message_storage = FrontendMessageStorage(type(self).keys)
+        self._frontend_message_storage = FrontendMessageStorage()

     def send_message(self, message):
         self.connector.send_message(message, scope=Scope.WEBSOCKET)

@@ -18,8 +31,13 @@ class FrontendHandler:
         self._frontend_message_storage.save_message(message)
         if message['key'] == 'recover':
             self.recover_frontend()
-        self.send_message(message)
+        if self._filter_message(message):
+            self.send_message(message)
+
+    @staticmethod
+    def _filter_message(message):
+        return not message['key'].startswith('ide')

     def recover_frontend(self):
         for message in self._frontend_message_storage.messages:
             self.send_message(message)
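The new _filter_message hook is what keeps IDE traffic from being echoed straight back to the frontend while it is still recorded for recovery. A minimal sketch of the behaviour implied by the lines above:

    # 'ide.*' messages are stored for later recovery but not forwarded immediately.
    assert FrontendHandler._filter_message({'key': 'message.send'}) is True
    assert FrontendHandler._filter_message({'key': 'ide.read'}) is False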
@@ -1,48 +1,38 @@
 class MessageSender:
-    """
-    Provides mechanisms to send messages to our frontend messaging component.
-    """
     def __init__(self, uplink):
         self.uplink = uplink
-        self.key = 'message'
-        self.queue_key = 'queueMessages'

-    def send(self, originator, message):
-        """
-        Sends a message.
-        :param originator: name of sender to be displayed on the frontend
-        :param message: message to send
-        """
+    def send(self, message, originator=None):
         message = {
-            'key': self.key,
-            'data': {
-                'originator': originator,
-                'message': message
-            }
-        }
+            'key': 'message.send',
+            'message': message
+        }
+        if originator:
+            message['originator'] = originator
         self.uplink.send_message(message)

-    def queue_messages(self, originator, messages):
-        """
-        Queues a list of messages to be displayed in a chatbot-like manner.
-        :param originator: name of sender to be displayed on the frontend
-        :param messages: list of messages to queue
-        """
-        message = {
-            'key': self.queue_key,
-            'data': {
-                'messages': [
-                    {'message': message, 'originator': originator}
-                    for message in messages
-                ]
-            }
-        }
-        self.uplink.send_message(message)
+    def queue_messages(self, messages, originator=None):
+        message_queue = {
+            'key': 'message.queue',
+            'value': []
+        }
+        for message in messages:
+            next_message = {'message': message}
+            if originator:
+                next_message['originator'] = originator
+            message_queue['value'].append(next_message)
+        self.uplink.send_message(message_queue)
+
+    def set_originator(self, originator):
+        self.uplink.send_message({
+            'key': 'message.config',
+            'originator': originator
+        })

     @staticmethod
     def generate_messages_from_queue(queue_message):
-        for message in queue_message['data']['messages']:
+        for message in queue_message['value']:
             yield {
-                'key': 'message',
-                'data': message
+                'key': 'message.send',
+                **message
             }
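With the reworked API the originator becomes optional and the payload is flat instead of being nested under 'data'. A hedged sketch of the resulting messages, using a stand-in uplink object (FakeUplink is not part of the framework):

    class FakeUplink:
        def __init__(self):
            self.sent = []

        def send_message(self, message):
            self.sent.append(message)

    uplink = FakeUplink()
    sender = MessageSender(uplink)
    sender.send('Hello!', originator='TFW')
    sender.queue_messages(['one', 'two'])
    # uplink.sent[0] == {'key': 'message.send', 'message': 'Hello!', 'originator': 'TFW'}
    # uplink.sent[1] == {'key': 'message.queue',
    #                    'value': [{'message': 'one'}, {'message': 'two'}]}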
@@ -29,16 +29,25 @@ class MessageStorage(ABC):


 class FrontendMessageStorage(MessageStorage):
-    def __init__(self, keys):
-        self._keys = keys
-        super().__init__()
-
     def _filter_message(self, message):
-        key = message['key']
-        return key in self._keys
+        return message['key'].startswith((
+            'console.write',
+            'dashboard.layout',
+            'dashboard.terminalMenuItem',
+            'message.send',
+            'message.config',
+            'ide.read'
+        ))

     def _transform_message(self, message):
-        if message['key'] == 'queueMessages':
-            yield from MessageSender.generate_messages_from_queue(message)
-        else:
-            yield message
+        transformations = {
+            'message.queue': MessageSender.generate_messages_from_queue,
+            'ide.read': self._delete_ide_content
+        }
+        if message['key'] in transformations:
+            yield from transformations[message['key']](message)
+
+    @staticmethod
+    def _delete_ide_content(message):
+        del message['content']
+        yield message
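The 'message.queue' transformation replays a queued message as individual 'message.send' messages when the frontend recovers. A small sketch of that expansion, based only on the generator shown above:

    queued = {'key': 'message.queue', 'value': [{'message': 'hi', 'originator': 'TFW'}]}
    print(list(MessageSender.generate_messages_from_queue(queued)))
    # -> [{'key': 'message.send', 'message': 'hi', 'originator': 'TFW'}]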
@@ -1,6 +1,6 @@
 import logging

-from tfw.internals.crypto import KeyManager, sign_message, verify_message
+from tfw.internals.crypto import KeyManager, sign_message
 from tfw.internals.networking import Scope

 from .fsm_updater import FSMUpdater
@@ -10,56 +10,31 @@ LOG = logging.getLogger(__name__)

 class FSMHandler:
     keys = ['fsm']
-    """
-    EventHandler responsible for managing the state machine of
-    the framework (TFW FSM).

-    tfw.networking.TFWServer instances automatically send 'trigger'
-    commands to the event handler listening on the 'fsm' key,
-    which should be an instance of this event handler.
-
-    This event handler accepts messages that have a
-    data['command'] key specifying a command to be executed.
-
-    An 'fsm_update' message is broadcasted after every successful
-    command.
-    """
     def __init__(self, *, fsm_type):
         self.fsm = fsm_type()
         self._fsm_updater = FSMUpdater(self.fsm)
         self.auth_key = KeyManager().auth_key

         self.command_handlers = {
-            'trigger': self.handle_trigger,
-            'update': self.handle_update
+            'fsm.step' : self.handle_step,
+            'fsm.announce' : self.handle_announce
         }

     def handle_event(self, message, connector):
         try:
-            message = self.command_handlers[message['data']['command']](message)
+            message = self.command_handlers[message['key']](message)
             if message:
                 fsm_update_message = self._fsm_updater.fsm_update
-                sign_message(self.auth_key, message)
                 sign_message(self.auth_key, fsm_update_message)
                 connector.send_message(fsm_update_message, Scope.BROADCAST)
         except KeyError:
             LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

-    def handle_trigger(self, message):
-        """
-        Attempts to step the FSM with the supplied trigger.
-
-        :param message: TFW message with a data field containing
-        the action to try triggering in data['value']
-        """
-        trigger = message['data']['value']
-        if self.fsm.step(trigger):
+    def handle_step(self, message):
+        if self.fsm.step(message['trigger']):
             return message
-        return None

-    def handle_update(self, message):
-        """
-        Does nothing, but triggers an 'fsm_update' message.
-        """
+    def handle_announce(self, message):
         # pylint: disable=no-self-use
         return message
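With the command name folded into the message key, the handler now reacts to messages shaped roughly like the following (the trigger name is made up for illustration):

    step = {'key': 'fsm.step', 'trigger': 'start_challenge'}  # attempts a transition
    announce = {'key': 'fsm.announce'}                        # only requests an update
    # On success handle_event() broadcasts a signed {'key': 'fsm.update', ...} message.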
@@ -5,7 +5,7 @@ class FSMUpdater:
     @property
     def fsm_update(self):
         return {
-            'key': 'fsm_update',
+            'key': 'fsm.update',
             **self.fsm_update_data
         }

@@ -1,93 +1,56 @@
-from typing import Iterable
+from functools import wraps
 from glob import glob
 from fnmatch import fnmatchcase
-from os.path import basename, isfile, join, relpath, exists, isdir, realpath
+from os.path import dirname, isdir, isfile, realpath
+
+
+def _with_is_allowed(func):
+    @wraps(func)
+    def wrapper(self, *args, **kwargs):
+        if self._is_allowed(args[0]):  # pylint: disable=protected-access
+            return func(self, *args, **kwargs)
+        raise ValueError('Forbidden path.')
+    return wrapper


 class FileManager:  # pylint: disable=too-many-instance-attributes
-    def __init__(self, working_directory, allowed_directories, selected_file=None, exclude=None):
-        self._exclude, self.exclude = [], exclude
-        self._allowed_directories, self.allowed_directories = None, allowed_directories
-        self._workdir, self.workdir = None, working_directory
-        self._filename, self.filename = None, selected_file or self.files[0]
-
-    @property
-    def exclude(self):
-        return self._exclude
-
-    @exclude.setter
-    def exclude(self, exclude):
-        if exclude is None:
-            return
-        if not isinstance(exclude, Iterable):
-            raise TypeError('Exclude must be Iterable!')
-        self._exclude = exclude
-
-    @property
-    def workdir(self):
-        return self._workdir
-
-    @workdir.setter
-    def workdir(self, directory):
-        if not exists(directory) or not isdir(directory):
-            raise EnvironmentError(f'"{directory}" is not a directory!')
-        if not self._is_in_allowed_dir(directory):
-            raise EnvironmentError(f'Directory "{directory}" is not allowed!')
-        self._workdir = directory
-
-    @property
-    def allowed_directories(self):
-        return self._allowed_directories
-
-    @allowed_directories.setter
-    def allowed_directories(self, directories):
-        self._allowed_directories = [realpath(directory) for directory in directories]
-
-    @property
-    def filename(self):
-        return self._filename
-
-    @filename.setter
-    def filename(self, filename):
-        if filename not in self.files:
-            raise EnvironmentError('No such file in workdir!')
-        self._filename = filename
+    def __init__(self, patterns):
+        self.patterns = patterns

     @property
     def files(self):
-        return [
-            self._relpath(file)
-            for file in glob(join(self._workdir, '**/*'), recursive=True)
-            if isfile(file)
-            and self._is_in_allowed_dir(file)
-            and not self._is_blacklisted(file)
-        ]
+        return list(set(
+            path
+            for pattern in self.patterns
+            for path in glob(pattern, recursive=True)
+            if isfile(path) and self._is_allowed(path)
+        ))

     @property
-    def file_contents(self):
-        with open(self._filepath(self.filename), 'rb', buffering=0) as ifile:
+    def parents(self):
+        return list(set(
+            self._find_directory(pattern)
+            for pattern in self.patterns
+        ))
+
+    @staticmethod
+    def _find_directory(pattern):
+        while pattern and not isdir(pattern):
+            pattern = dirname(pattern)
+        return pattern
+
+    def _is_allowed(self, filepath):
+        return any(
+            fnmatchcase(realpath(filepath), pattern)
+            for pattern in self.patterns
+        )
+
+    @_with_is_allowed
+    def read_file(self, filepath):  # pylint: disable=no-self-use
+        with open(filepath, 'rb', buffering=0) as ifile:
             return ifile.read().decode(errors='surrogateescape')

-    @file_contents.setter
-    def file_contents(self, value):
-        with open(self._filepath(self.filename), 'wb', buffering=0) as ofile:
-            ofile.write(value.encode())
-
-    def _is_in_allowed_dir(self, path):
-        return any(
-            realpath(path).startswith(allowed_dir)
-            for allowed_dir in self.allowed_directories
-        )
-
-    def _is_blacklisted(self, file):
-        return any(
-            fnmatchcase(file, blacklisted) or
-            fnmatchcase(basename(file), blacklisted)
-            for blacklisted in self.exclude
-        )
-
-    def _filepath(self, filename):
-        return join(self._workdir, filename)
-
-    def _relpath(self, filename):
-        return relpath(self._filepath(filename), start=self._workdir)
+    @_with_is_allowed
+    def write_file(self, filepath, contents):  # pylint: disable=no-self-use
+        with open(filepath, 'wb', buffering=0) as ofile:
+            ofile.write(contents.encode())
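The manager is now driven by glob patterns instead of a working directory plus a whitelist. A short usage sketch with made-up paths; anything that matches none of the patterns is rejected:

    manager = FileManager(['/srv/workdir/**/*.py'])           # patterns, not directories
    manager.write_file('/srv/workdir/src/app.py', 'print("hello")\n')
    print(manager.read_file('/srv/workdir/src/app.py'))
    print(manager.files)    # existing files matching at least one pattern
    print(manager.parents)  # deepest existing directories of the patterns (fed to inotify)
    manager.read_file('/etc/passwd')  # raises ValueError('Forbidden path.')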
@@ -1,8 +1,8 @@
 # pylint: disable=redefined-outer-name
 from dataclasses import dataclass
 from secrets import token_urlsafe
+from os import mkdir, symlink
 from os.path import join
-from os import chdir, mkdir, symlink
 from pathlib import Path
 from tempfile import TemporaryDirectory

@@ -13,112 +13,75 @@ from .file_manager import FileManager

 @dataclass
 class ManagerContext:
-    folder: str
+    workdir: str
+    subdir: str
+    subfile: str
     manager: FileManager

-    def join(self, path):
-        return join(self.folder, path)
+    def create_random_file(self, dirname, extension):
+        filename = self.join(f'{dirname}/{generate_name()}{extension}')
+        Path(filename).touch()
+        return filename
+
+    def create_random_folder(self, basepath):
+        dirname = self.join(f'{basepath}/{generate_name()}')
+        mkdir(dirname)
+        return dirname
+
+    def create_random_link(self, source, dirname, extension):
+        linkname = self.join(f'{dirname}/{generate_name()}{extension}')
+        symlink(source, linkname)
+        return linkname
+
+    def join(self, path):
+        return join(self.workdir, path)
+
+
+def generate_name():
+    return token_urlsafe(16)


 @pytest.fixture()
 def context():
-    dirs = {}
-
     with TemporaryDirectory() as workdir:
-        chdir(workdir)
-        for name in ['allowed', 'excluded', 'invis']:
-            node = join(workdir, name)
-            mkdir(node)
-            Path(join(node, 'empty.txt')).touch()
-            Path(join(node, 'empty.bin')).touch()
-            dirs[name] = node
-
-        yield ManagerContext(
-            workdir,
-            FileManager(
-                dirs['allowed'],
-                [dirs['allowed'], dirs['excluded']],
-                exclude=['*/excluded/*']
-            )
-        )
-
-
-@pytest.mark.parametrize('subdir', ['allowed/', 'excluded/'])
-def test_select_allowed_dirs(context, subdir):
-    context.manager.workdir = context.join(subdir)
-    assert context.manager.workdir == context.join(subdir)
-    newdir = context.join(subdir+'deep')
-    mkdir(newdir)
-    context.manager.workdir = newdir
-    assert context.manager.workdir == newdir
-
-
-@pytest.mark.parametrize('invdir', ['', 'invis'])
-def test_select_forbidden_dirs(context, invdir):
-    fullpath = context.join(invdir)
-    with pytest.raises(OSError):
-        context.manager.workdir = fullpath
-    assert context.manager.workdir != fullpath
-    context.manager.allowed_directories += [fullpath]
-    context.manager.workdir = fullpath
-    assert context.manager.workdir == fullpath
-
-
-@pytest.mark.parametrize('filename', ['another.txt', '*.txt'])
-def test_select_allowed_files(context, filename):
-    Path(context.join('allowed/'+filename)).touch()
-    assert filename in context.manager.files
-    context.manager.filename = filename
-    assert context.manager.filename == filename
-
-
-@pytest.mark.parametrize('path', [
-    {'dir': 'allowed/', 'file': 'illegal.bin'},
-    {'dir': 'excluded/', 'file': 'legal.txt'},
-    {'dir': 'allowed/', 'file': token_urlsafe(16)+'.bin'},
-    {'dir': 'excluded/', 'file': token_urlsafe(16)+'.txt'},
-    {'dir': 'allowed/', 'file': token_urlsafe(32)+'.bin'},
-    {'dir': 'excluded/', 'file': token_urlsafe(32)+'.txt'}
-])
-def test_select_excluded_files(context, path):
-    context.manager.workdir = context.join(path['dir'])
-    context.manager.exclude = ['*/excluded/*', '*.bin']
-    Path(context.join(path['dir']+path['file'])).touch()
-    assert path['file'] not in context.manager.files
-    with pytest.raises(OSError):
-        context.manager.filename = path['file']
-
-
-@pytest.mark.parametrize('path', [
-    {'src': 'excluded/empty.txt', 'dst': 'allowed/link.txt'},
-    {'src': 'invis/empty.txt', 'dst': 'allowed/link.txt'},
-    {'src': 'excluded/empty.txt', 'dst': 'allowed/'+token_urlsafe(16)+'.txt'},
-    {'src': 'invis/empty.txt', 'dst': 'allowed/'+token_urlsafe(16)+'.txt'},
-    {'src': 'excluded/empty.txt', 'dst': 'allowed/'+token_urlsafe(32)+'.txt'},
-    {'src': 'invis/empty.txt', 'dst': 'allowed/'+token_urlsafe(32)+'.txt'}
-])
-def test_select_excluded_symlinks(context, path):
-    symlink(context.join(path['src']), context.join(path['dst']))
-    assert path['dst'] not in context.manager.files
+        subdir = join(workdir, generate_name())
+        subfile = join(subdir, generate_name() + '.txt')
+        mkdir(subdir)
+        Path(subfile).touch()
+        manager = FileManager([join(workdir, '**/*.txt')])
+        yield ManagerContext(workdir, subdir, subfile, manager)
+
+
+def test_matching_files(context):
+    newdir = context.create_random_folder(context.subdir)
+    newfile = context.create_random_file(newdir, '.txt')
+    newlink = context.create_random_link(newfile, newdir, '.txt')
+    assert set(context.manager.files) == {context.subfile, newfile, newlink}
+
+
+def test_unmatching_files(context):
+    newtxt = context.create_random_file(context.workdir, '.txt')
+    newbin = context.create_random_file(context.subdir, '.bin')
+    context.create_random_link(newtxt, context.subdir, '.txt')
+    context.create_random_link(newbin, context.subdir, '.txt')
+    assert context.manager.files == [context.subfile]
+
+
+def test_parents(context):
+    newdir = context.create_random_folder(context.workdir)
+    context.manager.patterns += [f'{newdir}/[!/@]*/**/?.c']
+    assert set(context.manager.parents) == {context.workdir, newdir}


 def test_read_write_file(context):
     for _ in range(128):
-        context.manager.filename = 'empty.txt'
         content = token_urlsafe(32)
-        context.manager.file_contents = content
-        assert context.manager.file_contents == content
-        with open(context.join('allowed/empty.txt'), 'r') as ifile:
+        context.manager.write_file(context.subfile, content)
+        assert context.manager.read_file(context.subfile) == content
+        with open(context.subfile, 'r') as ifile:
             assert ifile.read() == content


 def test_regular_ide_actions(context):
-    context.manager.workdir = context.join('allowed')
-    newfile1, newfile2 = token_urlsafe(16), token_urlsafe(16)
-    Path(context.join(f'allowed/{newfile1}')).touch()
-    Path(context.join(f'allowed/{newfile2}')).touch()
-    for _ in range(8):
-        context.manager.filename = newfile1
-        content1 = token_urlsafe(32)
-        context.manager.file_contents = content1
-        context.manager.filename = newfile2
-        content2 = token_urlsafe(32)
-        context.manager.file_contents = content2
-        context.manager.filename = newfile1
-        assert context.manager.file_contents == content1
-        context.manager.filename = newfile2
-        assert context.manager.file_contents == content2
+    newfile1 = context.create_random_file(context.subdir, '.txt')
+    newfile2 = context.create_random_file(context.subdir, '.txt')
+    for _ in range(4):
+        content1, content2 = token_urlsafe(32), token_urlsafe(32)
+        context.manager.write_file(newfile1, content1)
+        context.manager.write_file(newfile2, content2)
+        assert context.manager.read_file(newfile1) == content1
+        assert context.manager.read_file(newfile2) == content2
@@ -1,4 +1,5 @@
 import logging
+from os.path import isfile

 from tfw.internals.networking import Scope
 from tfw.internals.inotify import InotifyObserver
@@ -33,160 +34,61 @@ BUILD_ARTIFACTS = (

 class IdeHandler:
     keys = ['ide']
-    # pylint: disable=too-many-arguments,anomalous-backslash-in-string
-    """
-    Event handler implementing the backend of our browser based IDE.
-    By default all files in the directory specified in __init__ are displayed
-    on the fontend. Note that this is a stateful component.
-
-    When any file in the selected directory changes they are automatically refreshed
-    on the frontend (this is done by listening to inotify events).
-
-    This EventHandler accepts messages that have a data['command'] key specifying
-    a command to be executed.
-
-    The API of each command is documented in their respective handler.
-    """
-    def __init__(self, *, directory, allowed_directories, selected_file=None, exclude=None):
-        """
-        :param key: the key this instance should listen to
-        :param directory: working directory which the EventHandler should serve files from
-        :param allowed_directories: list of directories that can be switched to using selectdir
-        :param selected_file: file that is selected by default
-        :param exclude: list of filenames that should not appear between files (for .o, .pyc, etc.)
-        """
+
+    def __init__(self, *, patterns, initial_file=''):
         self.connector = None
-        try:
-            self.filemanager = FileManager(
-                allowed_directories=allowed_directories,
-                working_directory=directory,
-                selected_file=selected_file,
-                exclude=exclude
-            )
-        except IndexError:
-            raise EnvironmentError(
-                f'No file(s) in IdeEventHandler working_directory "{directory}"!'
-            )
+        self.filemanager = FileManager(patterns)
+        self._initial_file = initial_file

         self.monitor = InotifyObserver(
-            self.filemanager.allowed_directories,
+            path=self.filemanager.parents,
             exclude=BUILD_ARTIFACTS
         )
         self.monitor.on_modified = self._reload_frontend
         self.monitor.start()

         self.commands = {
-            'read': self.read,
-            'write': self.write,
-            'select': self.select,
-            'selectdir': self.select_dir,
-            'exclude': self.exclude
+            'ide.read' : self.read,
+            'ide.write' : self.write
         }

+    @property
+    def initial_file(self):
+        if not isfile(self._initial_file):
+            self._initial_file = self.filemanager.files[0]
+        return self._initial_file
+
     def _reload_frontend(self, event):  # pylint: disable=unused-argument
-        self.send_message({
-            'key': 'ide',
-            'data': {'command': 'reload'}
-        })
+        self.send_message({'key': 'ide.reload'})

     def send_message(self, message):
         self.connector.send_message(message, scope=Scope.WEBSOCKET)

-    def read(self, data):
-        """
-        Read the currently selected file.
-
-        :return dict: TFW message data containing key 'content'
-                      (contents of the selected file)
-        """
+    def read(self, message):
+        if message.get('files'):
+            self.filemanager.patterns = message['files']
         try:
-            data['content'] = self.filemanager.file_contents
+            message['content'] = self.filemanager.read_file(message['filename'])
         except PermissionError:
-            data['content'] = 'You have no permission to open that file :('
+            message['content'] = 'You have no permission to open that file :('
         except FileNotFoundError:
-            data['content'] = 'This file was removed :('
+            message['content'] = 'This file was removed :('
         except Exception:  # pylint: disable=broad-except
-            data['content'] = 'Failed to read file :('
-        return data
+            message['content'] = 'Failed to read file :('

-    def write(self, data):
-        """
-        Overwrites a file with the desired string.
-
-        :param data: TFW message data containing key 'content'
-                     (new file content)
-        """
+    def write(self, message):
         try:
-            self.filemanager.file_contents = data['content']
+            self.filemanager.write_file(message['filename'], message['content'])
         except Exception:  # pylint: disable=broad-except
             LOG.exception('Error writing file!')
-        del data['content']
-        return data
-
-    def select(self, data):
-        """
-        Selects a file from the current directory.
-
-        :param data: TFW message data containing 'filename'
-        (name of file to select relative to the current directory)
-        """
-        try:
-            self.filemanager.filename = data['filename']
-        except EnvironmentError:
-            LOG.exception('Failed to select file "%s"', data['filename'])
-        return data
-
-    def select_dir(self, data):
-        """
-        Select a new working directory to display files from.
-
-        :param data: TFW message data containing 'directory'
-        (absolute path of diretory to select.
-        must be a path whitelisted in
-        self.allowed_directories)
-        """
-        try:
-            self.filemanager.workdir = data['directory']
-            try:
-                self.filemanager.filename = self.filemanager.files[0]
-                self.read(data)
-            except IndexError:
-                data['content'] = 'No files in this directory :('
-        except EnvironmentError as err:
-            LOG.error(
-                'Failed to select directory "%s". Reason: %s',
-                data['directory'], str(err)
-            )
-        return data
-
-    def exclude(self, data):
-        """
-        Overwrite list of excluded files
-
-        :param data: TFW message data containing 'exclude'
-        (list of unix-style filename patterns to be excluded,
-        e.g.: ["\*.pyc", "\*.o")
-        """
-        try:
-            self.filemanager.exclude = list(data['exclude'])
-        except TypeError:
-            LOG.error('Exclude must be Iterable!')
-        return data
-
-    def attach_fileinfo(self, data):
-        """
-        Basic information included in every response to the frontend.
-        """
-        data['filename'] = self.filemanager.filename
-        data['files'] = self.filemanager.files
-        data['directory'] = self.filemanager.workdir
+        del message['content']

     def handle_event(self, message, _):
         try:
-            data = message['data']
-            message['data'] = self.commands[data['command']](data)
-            self.attach_fileinfo(data)
+            if message['filename'] == '':
+                message['filename'] = self.initial_file
+            self.commands[message['key']](message)
+            message['files'] = self.filemanager.files
             self.send_message(message)
         except KeyError:
             LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)
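A hedged example of the message round-trip after this change (path and content invented): the frontend addresses the handler by key, and the handler fills in the file content plus the current file list before replying.

    request = {'key': 'ide.read', 'filename': ''}
    # handle_event() swaps the empty filename for initial_file, runs the 'ide.read'
    # command and attaches the matching files, so the reply looks roughly like:
    # {'key': 'ide.read', 'filename': '/srv/workdir/src/app.py',
    #  'content': '...', 'files': ['/srv/workdir/src/app.py', ...]}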
@@ -7,7 +7,7 @@ from .supervisor import ProcessLogManager


 class LogInotifyObserver(InotifyObserver, ProcessLogManager):
-    def __init__(self, connector, supervisor_uri, process_name, log_tail=0):
+    def __init__(self, connector, process_name, supervisor_uri, log_tail=0):
         self._prevent_log_recursion()
         self._connector = connector
         self._process_name = process_name
@@ -36,10 +36,7 @@ class LogInotifyObserver(InotifyObserver, ProcessLogManager):

     def on_modified(self, event):
         self._connector.send_message({
-            'key': 'processlog',
-            'data': {
-                'command': 'new_log',
-                'stdout': self.read_stdout(self.process_name, tail=self.log_tail),
-                'stderr': self.read_stderr(self.process_name, tail=self.log_tail)
-            }
+            'key': 'process.log.new',
+            'stdout': self.read_stdout(self.process_name, tail=self.log_tail),
+            'stderr': self.read_stderr(self.process_name, tail=self.log_tail)
         }, Scope.BROADCAST)
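For reference, the flattened broadcast produced above now looks like this (the stdout/stderr values are placeholders):

    example_broadcast = {
        'key': 'process.log.new',
        'stdout': '...last log_tail characters of stdout...',
        'stderr': '...last log_tail characters of stderr...'
    }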
@@ -9,45 +9,35 @@ LOG = logging.getLogger(__name__)


 class ProcessHandler(ProcessManager, ProcessLogManager):
-    keys = ['processmanager']
-    """
-    Event handler that can manage processes managed by supervisor.
-
-    This EventHandler accepts messages that have a data['command'] key specifying
-    a command to be executed.
-    Every message must contain a data['process_name'] field with the name of the
-    process to manage. This is the name specified in supervisor config files like so:
-    [program:someprogram]
-    Commands available: start, stop, restart, readlog
-    (the names are as self-documenting as it gets)
-    """
+    keys = ['process']
+
     def __init__(self, *, supervisor_uri, log_tail=0):
         ProcessManager.__init__(self, supervisor_uri)
         ProcessLogManager.__init__(self, supervisor_uri)
         self.log_tail = log_tail

         self.commands = {
-            'start': self.start_process,
-            'stop': self.stop_process,
-            'restart': self.restart_process
+            'process.start': self.start_process,
+            'process.stop': self.stop_process,
+            'process.restart': self.restart_process
         }

     def handle_event(self, message, connector):
         try:
-            data = message['data']
             try:
-                self.commands[data['command']](data['process_name'])
+                self.commands[message['key']](message['name'])
             except SupervisorFault as fault:
-                message['data']['error'] = fault.faultString
+                message['error'] = fault.faultString
             finally:
-                message['data']['stdout'] = self.read_stdout(
-                    data['process_name'],
+                message['stdout'] = self.read_stdout(
+                    message['name'],
                     self.log_tail
                 )
-                message['data']['stderr'] = self.read_stderr(
-                    data['process_name'],
+                message['stderr'] = self.read_stderr(
+                    message['name'],
                     self.log_tail
                 )
                 connector.send_message(message, scope=Scope.WEBSOCKET)
         except KeyError:
-            LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)
+            if not message['key'].startswith('process.log'):
+                LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)
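Process control messages are addressed by key as well; a sketch of a restart request and the fields the handler adds to the reply ('webservice' is an invented supervisor program name):

    request = {'key': 'process.restart', 'name': 'webservice'}
    # The reply echoes the request plus 'stdout' and 'stderr' tails, and an
    # 'error' field holding the fault string if supervisor raised a fault.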
@@ -6,63 +6,38 @@ LOG = logging.getLogger(__name__)


 class ProcessLogHandler:
-    keys = ['logmonitor']
-    """
-    Monitors the output of a supervisor process (stdout, stderr) and
-    sends the results to the frontend.
-
-    Accepts messages that have a data['command'] key specifying
-    a command to be executed.
-
-    The API of each command is documented in their respective handler.
-    """
+    keys = ['process.log']
+
     def __init__(self, *, process_name, supervisor_uri, log_tail=0):
-        self.connector = None
+        self.connector, self._monitor = None, None
         self.process_name = process_name
         self._supervisor_uri = supervisor_uri
         self._initial_log_tail = log_tail
-        self._monitor = None

         self.command_handlers = {
-            'process_name': self.handle_process_name,
-            'log_tail': self.handle_log_tail
+            'process.log.set': self.handle_set
         }

     def start(self):
         self._monitor = LogInotifyObserver(
             connector=self.connector,
-            supervisor_uri=self._supervisor_uri,
             process_name=self.process_name,
+            supervisor_uri=self._supervisor_uri,
             log_tail=self._initial_log_tail
         )
         self._monitor.start()

     def handle_event(self, message, _):
         try:
-            data = message['data']
-            self.command_handlers[data['command']](data)
+            self.command_handlers[message['key']](message)
         except KeyError:
             LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

-    def handle_process_name(self, data):
-        """
-        Changes the monitored process.
-
-        :param data: TFW message data containing 'value'
-        (name of the process to monitor)
-        """
-        self._monitor.process_name = data['value']
-
-    def handle_log_tail(self, data):
-        """
-        Sets tail length of the log the monitor will send
-        to the frontend (the monitor will send back the last
-        'value' characters of the log).
-
-        :param data: TFW message data containing 'value'
-        (new tail length)
-        """
-        self._monitor.log_tail = data['value']
+    def handle_set(self, data):
+        if data.get('name'):
+            self._monitor.process_name = data['name']
+        if data.get('tail'):
+            self._monitor.log_tail = data['tail']

     def cleanup(self):
         self._monitor.stop()
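The two old commands collapse into a single 'process.log.set' message in which either field may be omitted (the values below are examples only):

    set_message = {'key': 'process.log.set', 'name': 'webservice', 'tail': 2000}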
@@ -6,8 +6,6 @@ from datetime import datetime

 from dateutil import parser as dateparser

-from tfw.internals.networking import Scope
-
 from .snapshot_provider import SnapshotProvider

 LOG = logging.getLogger(__name__)
@@ -23,9 +21,9 @@ class SnapshotHandler:
         self.init_snapshot_providers(directories)

         self.command_handlers = {
-            'take_snapshot': self.handle_take_snapshot,
-            'restore_snapshot': self.handle_restore_snapshot,
-            'exclude': self.handle_exclude
+            'snapshot.take': self.handle_take_snapshot,
+            'snapshot.restore': self.handle_restore_snapshot,
+            'snapshot.exclude': self.handle_exclude
         }

     def init_snapshot_providers(self, directories):
@@ -45,23 +43,20 @@ class SnapshotHandler:
         makedirs(git_dir, exist_ok=True)
         return git_dir

-    def handle_event(self, message, connector):
+    def handle_event(self, message, _):
         try:
-            data = message['data']
-            message['data'] = self.command_handlers[data['command']](data)
-            connector.send_message(message, scope=Scope.WEBSOCKET)
+            self.command_handlers[message['key']](message)
         except KeyError:
             LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

-    def handle_take_snapshot(self, data):
+    def handle_take_snapshot(self, _):
         LOG.debug('Taking snapshots of directories %s', self.snapshot_providers.keys())
         for provider in self.snapshot_providers.values():
             provider.take_snapshot()
-        return data

-    def handle_restore_snapshot(self, data):
+    def handle_restore_snapshot(self, message):
         date = dateparser.parse(
-            data.get(
+            message.get(
                 'value',
                 datetime.now().isoformat()
             )
@@ -73,13 +68,11 @@ class SnapshotHandler:
         )
         for provider in self.snapshot_providers.values():
             provider.restore_snapshot(date)
-        return data

-    def handle_exclude(self, data):
-        exclude_unix_patterns = data['value']
+    def handle_exclude(self, message):
+        exclude_unix_patterns = message['value']
         if not isinstance(exclude_unix_patterns, list):
             raise KeyError

         for provider in self.snapshot_providers.values():
             provider.exclude = exclude_unix_patterns
-        return data
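Sketches of the renamed snapshot messages (timestamp and patterns are illustrative); 'snapshot.restore' falls back to the current time when 'value' is missing:

    take = {'key': 'snapshot.take'}
    restore = {'key': 'snapshot.restore', 'value': '2019-06-01T12:00:00'}
    exclude = {'key': 'snapshot.exclude', 'value': ['*.pyc', '*.o']}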
@@ -7,40 +7,25 @@ LOG = logging.getLogger(__name__)


 class TerminalHandler:
-    keys = ['shell']
-    """
-    Event handler responsible for managing terminal sessions for frontend xterm
-    sessions to connect to. You need to instanciate this in order for frontend
-    terminals to work.
-
-    This EventHandler accepts messages that have a data['command'] key specifying
-    a command to be executed.
-    The API of each command is documented in their respective handler.
-    """
-    def __init__(self, *, port, user, workind_directory, histfile):
-        """
-        :param key: key this EventHandler listens to
-        :param monitor: tfw.components.HistoryMonitor instance to read command history from
-        """
-        self.connector = None
+    keys = ['terminal']
+
+    def __init__(self, *, port, user, working_directory, histfile):
+        self.connector, self._historymonitor = None, None
         self._histfile = histfile
-        self._historymonitor = None
         bash_as_user_cmd = ['sudo', '-u', user, 'bash']

         self.terminado_server = TerminadoMiniServer(
             '/terminal',
             port,
-            workind_directory,
+            working_directory,
             bash_as_user_cmd
         )
+        self.terminado_server.listen()

         self.commands = {
-            'write': self.write,
-            'read': self.read
+            'terminal.write': self.handle_write
         }

-        self.terminado_server.listen()
-
     def start(self):
         self._historymonitor = BashMonitor(self.connector, self._histfile)
         self._historymonitor.start()
@@ -51,34 +36,12 @@ class TerminalHandler:

     def handle_event(self, message, _):
         try:
-            data = message['data']
-            message['data'] = self.commands[data['command']](data)
+            self.commands[message['key']](message)
         except KeyError:
             LOG.error('IGNORING MESSAGE: Invalid message received: %s', message)

-    def write(self, data):
-        """
-        Writes a string to the terminal session (on the pty level).
-        Useful for pre-typing and executing commands for the user.
-
-        :param data: TFW message data containing 'value'
-        (command to be written to the pty)
-        """
-        self.terminado_server.pty.write(data['value'])
-        return data
-
-    def read(self, data):
-        """
-        Reads the history of commands executed.
-
-        :param data: TFW message data containing 'count'
-        (the number of history elements to return)
-        :return dict: message with list of commands in data['history']
-        """
-        data['count'] = int(data.get('count', 1))
-        if self.historymonitor:
-            data['history'] = self.historymonitor.history[-data['count']:]
-        return data
+    def handle_write(self, message):
+        self.terminado_server.pty.write(message['command'])

     def cleanup(self):
         self.terminado_server.stop()
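The terminal API shrinks to a single write command; the value below is an example string that would be written verbatim to the pty:

    write_message = {'key': 'terminal.write', 'command': 'ls -la\n'}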
@@ -6,11 +6,7 @@ LOG = logging.getLogger(__name__)


 class FSMAware:
-    keys = ['fsm_update']
-    """
-    Base class for stuff that has to be aware of the framework FSM.
-    This is done by processing 'fsm_update' messages.
-    """
+    keys = ['fsm.update']
     def __init__(self):
         self.fsm_state = None
         self.fsm_in_accepted_state = False
@@ -18,7 +14,7 @@ class FSMAware:
         self._auth_key = KeyManager().auth_key

     def process_message(self, message):
-        if message['key'] == 'fsm_update':
+        if message['key'] == 'fsm.update':
             if verify_message(self._auth_key, message):
                 self._handle_fsm_update(message)

@@ -15,8 +15,6 @@ LOG = logging.getLogger(__name__)

 class ZMQDownlinkConnector:
     def __init__(self, connect_addr):
-        self.keys = []
-        self._on_recv_callback = None
         self._zmq_sub_socket = zmq.Context.instance().socket(zmq.SUB)
         self._zmq_sub_socket.setsockopt(zmq.RCVHWM, 0)
         self._zmq_sub_socket.connect(connect_addr)
@@ -25,19 +23,14 @@ class ZMQDownlinkConnector:
     def subscribe(self, *keys):
         for key in keys:
             self._zmq_sub_socket.setsockopt_string(zmq.SUBSCRIBE, key)
-            self.keys.append(key)

     def unsubscribe(self, *keys):
         for key in keys:
             self._zmq_sub_socket.setsockopt_string(zmq.UNSUBSCRIBE, key)
-            self.keys.remove(key)

     def register_callback(self, callback):
-        if callback:
-            self._on_recv_callback = callback
-            self._zmq_sub_stream.on_recv(with_deserialize_tfw_msg(self._on_recv))
-        else:
-            self._zmq_sub_stream.on_recv(None)
+        callback = with_deserialize_tfw_msg(callback) if callback else None
+        self._zmq_sub_stream.on_recv(callback)

     def recv_message(self, *, block=True):
         if self._zmq_sub_stream.receiving():
@@ -48,11 +41,6 @@ class ZMQDownlinkConnector:
         except zmq.ZMQError:
             raise IOError("No data available to recv!")

-    def _on_recv(self, message):
-        key = message['key']
-        if key in self.keys or '' in self.keys:
-            self._on_recv_callback(message)
-
     def close(self):
         self._zmq_sub_stream.close()
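Key filtering now happens purely through ZMQ subscriptions rather than a Python-side key list, so a registered callback sees every subscribed message already deserialized. A minimal sketch, assuming connector is a ZMQDownlinkConnector attached to a running IOLoop:

    connector.subscribe('fsm', 'process')
    connector.register_callback(lambda message: print(message['key']))
    connector.register_callback(None)  # passing None detaches the callback from the stream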