Separate independent classes from built-in event handlers

R. Richard
2019-07-05 15:25:59 +02:00
committed by therealkrispet
parent f6a369496d
commit fbe60de968
12 changed files with 160 additions and 157 deletions

View File

@@ -1,6 +1,12 @@
 from .commands_equal import CommandsEqual
 from .file_manager import FileManager
-from .history_monitor import HistoryMonitor, BashMonitor, GDBMonitor
-from .snapshot_provider import SnapshotProvider
-from .terminal_commands import TerminalCommands
+from .fsm_aware import FSMAware
+from .fsm_updater import FSMUpdater
+from .history_monitor import BashMonitor, GDBMonitor
+from .log_inotify_observer import LogInotifyObserver
+from .message_sender import MessageSender
+from .message_storage import FrontendMessageStorage
+from .snapshot_provider import SnapshotProvider
+from .supervisor import ProcessManager, LogManager
+from .terminado_mini_server import TerminadoMiniServer
+from .terminal_commands import TerminalCommands

View File

@@ -0,0 +1,25 @@
class FSMUpdater:
    def __init__(self, fsm):
        self.fsm = fsm

    @property
    def fsm_update(self):
        return {
            'key': 'fsm_update',
            'data': self.fsm_update_data
        }

    @property
    def fsm_update_data(self):
        valid_transitions = [
            {'trigger': trigger}
            for trigger in self.fsm.get_triggers(self.fsm.state)
        ]
        last_fsm_event = self.fsm.event_log[-1]
        last_fsm_event['timestamp'] = last_fsm_event['timestamp'].isoformat()
        return {
            'current_state': self.fsm.state,
            'valid_transitions': valid_transitions,
            'in_accepted_state': self.fsm.in_accepted_state,
            'last_event': last_fsm_event
        }
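
A minimal sketch of how FSMUpdater could be driven. The FakeFSM stub below is hypothetical and only mimics the FSM surface the updater reads (state, in_accepted_state, event_log and get_triggers):

from datetime import datetime

class FakeFSM:
    # Hypothetical stand-in for a real tfw state machine.
    state = 'start'
    in_accepted_state = False
    event_log = [{'trigger': 'init', 'timestamp': datetime.now()}]

    def get_triggers(self, state):
        return ['step_1']

updater = FSMUpdater(FakeFSM())
print(updater.fsm_update)
# {'key': 'fsm_update', 'data': {'current_state': 'start', ...}}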

View File

@@ -0,0 +1,45 @@
import logging

from tfw.networking import Scope

from .inotify import InotifyObserver
from .supervisor import LogManager


class LogInotifyObserver(InotifyObserver, LogManager):
    def __init__(self, server_connector, supervisor_uri, process_name, log_tail=0):
        self._prevent_log_recursion()
        self._server_connector = server_connector
        self._process_name = process_name
        self.log_tail = log_tail
        self._procinfo = None
        LogManager.__init__(self, supervisor_uri)
        InotifyObserver.__init__(self, self._get_logfiles())

    @staticmethod
    def _prevent_log_recursion():
        # This is done to prevent inotify event logs triggering themselves (infinite log recursion)
        logging.getLogger('watchdog.observers.inotify_buffer').propagate = False

    def _get_logfiles(self):
        self._procinfo = self.supervisor.getProcessInfo(self._process_name)
        return self._procinfo['stdout_logfile'], self._procinfo['stderr_logfile']

    @property
    def process_name(self):
        return self._process_name

    @process_name.setter
    def process_name(self, process_name):
        self._process_name = process_name
        self.paths = self._get_logfiles()

    def on_modified(self, event):
        self._server_connector.send_message({
            'key': 'processlog',
            'data': {
                'command': 'new_log',
                'stdout': self.read_stdout(self.process_name, tail=self.log_tail),
                'stderr': self.read_stderr(self.process_name, tail=self.log_tail)
            }
        }, Scope.BROADCAST)
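
Wiring the observer up could look like the sketch below; the URI, process name and connector are assumptions, and start() is presumed to come from the watchdog-based InotifyObserver base class:

observer = LogInotifyObserver(
    server_connector=server_connector,            # assumed: exposes send_message(message, scope)
    supervisor_uri='http://localhost:9001/RPC2',  # assumed supervisord XML-RPC endpoint
    process_name='webservice',                    # assumed supervisord-managed process
    log_tail=2000
)
observer.start()  # presumed inherited from the watchdog observer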

View File

@@ -0,0 +1,48 @@
class MessageSender:
    """
    Provides mechanisms to send messages to our frontend messaging component.
    """
    def __init__(self, uplink):
        self.uplink = uplink
        self.key = 'message'
        self.queue_key = 'queueMessages'

    def send(self, originator, message):
        """
        Sends a message.
        :param originator: name of sender to be displayed on the frontend
        :param message: message to send
        """
        message = {
            'key': self.key,
            'data': {
                'originator': originator,
                'message': message
            }
        }
        self.uplink.send_message(message)

    def queue_messages(self, originator, messages):
        """
        Queues a list of messages to be displayed in a chatbot-like manner.
        :param originator: name of sender to be displayed on the frontend
        :param messages: list of messages to queue
        """
        message = {
            'key': self.queue_key,
            'data': {
                'messages': [
                    {'message': message, 'originator': originator}
                    for message in messages
                ]
            }
        }
        self.uplink.send_message(message)

    @staticmethod
    def generate_messages_from_queue(queue_message):
        for message in queue_message['data']['messages']:
            yield {
                'key': 'message',
                'data': message
            }
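
A self-contained usage sketch; the Uplink class here is a hypothetical stand-in for the real server connector, which only needs a send_message(message) method:

class Uplink:
    def send_message(self, message):
        print(message)  # a real connector would forward this to the frontend

sender = MessageSender(Uplink())
sender.send('TFW', 'Hello!')
sender.queue_messages('TFW', ['First message', 'Second message'])

# Expanding a queued batch back into individual 'message' events:
queued = {
    'key': 'queueMessages',
    'data': {'messages': [{'originator': 'TFW', 'message': 'Hi'}]}
}
for message in MessageSender.generate_messages_from_queue(queued):
    print(message)  # {'key': 'message', 'data': {'originator': 'TFW', 'message': 'Hi'}}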

View File

@@ -0,0 +1,44 @@
from abc import ABC, abstractmethod
from contextlib import suppress

from .message_sender import MessageSender


class MessageStorage(ABC):
    def __init__(self):
        self._messages = []

    def save_message(self, message):
        with suppress(KeyError, AttributeError):
            if self._filter_message(message):
                self._messages.extend(self._transform_message(message))

    @abstractmethod
    def _filter_message(self, message):
        raise NotImplementedError

    def _transform_message(self, message):  # pylint: disable=no-self-use
        yield message

    def clear(self):
        self._messages.clear()

    @property
    def messages(self):
        yield from self._messages


class FrontendMessageStorage(MessageStorage):
    def __init__(self, keys):
        self._keys = keys
        super().__init__()

    def _filter_message(self, message):
        key = message['key']
        return key in self._keys

    def _transform_message(self, message):
        if message['key'] == 'queueMessages':
            yield from MessageSender.generate_messages_from_queue(message)
        else:
            yield message
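
A sketch of the storage in action; the keys and payloads below are illustrative, matching the message formats used elsewhere in this commit:

storage = FrontendMessageStorage(keys=('message', 'queueMessages'))

storage.save_message({'key': 'message', 'data': {'originator': 'TFW', 'message': 'Hi'}})
storage.save_message({'key': 'irrelevant', 'data': {}})  # filtered out, key not tracked
storage.save_message({
    'key': 'queueMessages',
    'data': {'messages': [{'originator': 'TFW', 'message': 'Queued'}]}
})  # expanded into plain 'message' events by _transform_message

print(list(storage.messages))  # two 'message' entries
storage.clear()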

View File

@@ -0,0 +1,36 @@
from os import remove
from contextlib import suppress

import xmlrpc.client
from xmlrpc.client import Fault as SupervisorFault


class SupervisorBase:
    def __init__(self, supervisor_uri):
        self.supervisor = xmlrpc.client.ServerProxy(supervisor_uri).supervisor


class ProcessManager(SupervisorBase):
    def stop_process(self, process_name):
        with suppress(SupervisorFault):
            self.supervisor.stopProcess(process_name)

    def start_process(self, process_name):
        self.supervisor.startProcess(process_name)

    def restart_process(self, process_name):
        self.stop_process(process_name)
        self.start_process(process_name)


class LogManager(SupervisorBase):
    def read_stdout(self, process_name, tail=0):
        return self.supervisor.readProcessStdoutLog(process_name, -tail, 0)

    def read_stderr(self, process_name, tail=0):
        return self.supervisor.readProcessStderrLog(process_name, -tail, 0)

    def clear_logs(self, process_name):
        for logfile in ('stdout_logfile', 'stderr_logfile'):
            with suppress(FileNotFoundError):
                remove(self.supervisor.getProcessInfo(process_name)[logfile])
        self.supervisor.clearProcessLogs(process_name)
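
A hedged usage sketch; it assumes supervisord is serving XML-RPC on the URI below with a process named 'webservice' configured:

SUPERVISOR_URI = 'http://localhost:9001/RPC2'  # assumed endpoint

processes = ProcessManager(SUPERVISOR_URI)
logs = LogManager(SUPERVISOR_URI)

processes.restart_process('webservice')
print(logs.read_stdout('webservice', tail=4096))  # last 4 KiB of stdout
logs.clear_logs('webservice')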