
Commit 6ee066a

Live terminal output (#5396)
* Add /logs/raw and /logs/subscribe for getting logs on the frontend. Hijacks stderr/stdout to send all output data to the client on flush.
* Use existing send_sync method.
* Fix: get_logs should return a string.
* Fix bug.
* Pass no server.
* Fix tests.
* Fix output flush on Linux.
1 parent dd5b57e commit 6ee066a
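
For context, a minimal client-side sketch of how the new endpoints could be used (not part of the commit). It assumes a ComfyUI server at 127.0.0.1:8188, that these routes are mounted under the /internal prefix, and that CLIENT_ID matches an open /ws websocket session; once subscribed, new log entries are pushed to that socket as "logs" events.

import json
import urllib.request

BASE = "http://127.0.0.1:8188/internal"   # assumed host/port and route prefix
CLIENT_ID = "my-websocket-client-id"      # hypothetical id of an open /ws connection

# 1. Fetch the buffered backlog plus the server's reported terminal size.
with urllib.request.urlopen(f"{BASE}/logs/raw") as resp:
    backlog = json.load(resp)
print(backlog["size"], len(backlog["entries"]), "buffered entries")

# 2. Subscribe this client to live log pushes (enabled=False unsubscribes).
req = urllib.request.Request(
    f"{BASE}/logs/subscribe",
    data=json.dumps({"clientId": CLIENT_ID, "enabled": True}).encode(),
    headers={"Content-Type": "application/json"},
    method="PATCH",
)
urllib.request.urlopen(req)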

5 files changed: +131 −17 lines

api_server/routes/internal/internal_routes.py

Lines changed: 27 additions & 2 deletions
@@ -2,6 +2,7 @@
 from typing import Optional
 from folder_paths import models_dir, user_directory, output_directory, folder_names_and_paths
 from api_server.services.file_service import FileService
+from api_server.services.terminal_service import TerminalService
 import app.logger
 
 class InternalRoutes:
@@ -11,14 +12,17 @@ class InternalRoutes:
     Check README.md for more information.
 
     '''
-    def __init__(self):
+
+    def __init__(self, prompt_server):
         self.routes: web.RouteTableDef = web.RouteTableDef()
         self._app: Optional[web.Application] = None
         self.file_service = FileService({
             "models": models_dir,
             "user": user_directory,
             "output": output_directory
         })
+        self.prompt_server = prompt_server
+        self.terminal_service = TerminalService(prompt_server)
 
     def setup_routes(self):
         @self.routes.get('/files')
@@ -34,7 +38,28 @@ async def list_files(request):
 
         @self.routes.get('/logs')
         async def get_logs(request):
-            return web.json_response(app.logger.get_logs())
+            return web.json_response("".join([(l["t"] + " - " + l["m"]) for l in app.logger.get_logs()]))
+
+        @self.routes.get('/logs/raw')
+        async def get_logs(request):
+            self.terminal_service.update_size()
+            return web.json_response({
+                "entries": list(app.logger.get_logs()),
+                "size": {"cols": self.terminal_service.cols, "rows": self.terminal_service.rows}
+            })
+
+        @self.routes.patch('/logs/subscribe')
+        async def subscribe_logs(request):
+            json_data = await request.json()
+            client_id = json_data["clientId"]
+            enabled = json_data["enabled"]
+            if enabled:
+                self.terminal_service.subscribe(client_id)
+            else:
+                self.terminal_service.unsubscribe(client_id)
+
+            return web.Response(status=200)
+
 
         @self.routes.get('/folder_paths')
         async def get_folder_paths(request):
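
To make the new payload shapes concrete: entries coming out of app.logger.get_logs() are dicts with a timestamp "t" and a message "m". The plain /logs route now flattens them into one string, while /logs/raw returns them untouched together with the reported terminal size. A small illustration with made-up entries and an assumed 80x24 terminal:

entries = [
    {"t": "2024-10-30T12:00:00", "m": "model loaded\n"},
    {"t": "2024-10-30T12:00:01", "m": "prompt executed\n"},
]

# What GET /logs now serves (one joined string, same expression as the handler):
flat = "".join([(l["t"] + " - " + l["m"]) for l in entries])

# What GET /logs/raw serves (structured entries plus the terminal size):
raw = {"entries": entries, "size": {"cols": 80, "rows": 24}}
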
api_server/services/terminal_service.py

Lines changed: 47 additions & 0 deletions

@@ -0,0 +1,47 @@
+from app.logger import on_flush
+import os
+
+
+class TerminalService:
+    def __init__(self, server):
+        self.server = server
+        self.cols = None
+        self.rows = None
+        self.subscriptions = set()
+        on_flush(self.send_messages)
+
+    def update_size(self):
+        sz = os.get_terminal_size()
+        changed = False
+        if sz.columns != self.cols:
+            self.cols = sz.columns
+            changed = True
+
+        if sz.lines != self.rows:
+            self.rows = sz.lines
+            changed = True
+
+        if changed:
+            return {"cols": self.cols, "rows": self.rows}
+
+        return None
+
+    def subscribe(self, client_id):
+        self.subscriptions.add(client_id)
+
+    def unsubscribe(self, client_id):
+        self.subscriptions.discard(client_id)
+
+    def send_messages(self, entries):
+        if not len(entries) or not len(self.subscriptions):
+            return
+
+        new_size = self.update_size()
+
+        for client_id in self.subscriptions.copy():  # prevent: Set changed size during iteration
+            if client_id not in self.server.sockets:
+                # Automatically unsub if the socket has disconnected
+                self.unsubscribe(client_id)
+                continue
+
+            self.server.send_sync("logs", {"entries": entries, "size": new_size}, client_id)
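
A minimal usage sketch for TerminalService (not part of the commit), assuming the ComfyUI repository root is on sys.path. DummyServer is a hypothetical stand-in for the prompt server that only provides the two members the service touches, sockets and send_sync(); run it from a real terminal, since update_size() calls os.get_terminal_size().

from api_server.services.terminal_service import TerminalService


class DummyServer:
    sockets = {"client-a": object()}   # pretend only client-a has an open websocket

    def send_sync(self, event, data, sid):
        print(f"{event} -> {sid}: {len(data['entries'])} entries, size={data['size']}")


service = TerminalService(DummyServer())
service.subscribe("client-a")
service.subscribe("client-b")          # has no socket, so it is dropped on first send

service.send_messages([{"t": "2024-10-30T12:00:00", "m": "hello\n"}])
print(service.subscriptions)           # {'client-a'}: client-b was auto-unsubscribed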

app/logger.py

Lines changed: 53 additions & 11 deletions
@@ -1,31 +1,73 @@
-import logging
-from logging.handlers import MemoryHandler
 from collections import deque
+from datetime import datetime
+import io
+import logging
+import sys
+import threading
 
 logs = None
-formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+stdout_interceptor = None
+stderr_interceptor = None
+
+
+class LogInterceptor(io.TextIOWrapper):
+    def __init__(self, stream, *args, **kwargs):
+        buffer = stream.buffer
+        encoding = stream.encoding
+        super().__init__(buffer, *args, **kwargs, encoding=encoding, line_buffering=stream.line_buffering)
+        self._lock = threading.Lock()
+        self._flush_callbacks = []
+        self._logs_since_flush = []
+
+    def write(self, data):
+        entry = {"t": datetime.now().isoformat(), "m": data}
+        with self._lock:
+            self._logs_since_flush.append(entry)
+
+            # Simple handling for cr to overwrite the last output if it isnt a full line
+            # else logs just get full of progress messages
+            if isinstance(data, str) and data.startswith("\r") and not logs[-1]["m"].endswith("\n"):
+                logs.pop()
+            logs.append(entry)
+        super().write(data)
+
+    def flush(self):
+        super().flush()
+        for cb in self._flush_callbacks:
+            cb(self._logs_since_flush)
+        self._logs_since_flush = []
+
+    def on_flush(self, callback):
+        self._flush_callbacks.append(callback)
 
 
 def get_logs():
-    return "\n".join([formatter.format(x) for x in logs])
+    return logs
+
 
+def on_flush(callback):
+    if stdout_interceptor is not None:
+        stdout_interceptor.on_flush(callback)
+    if stderr_interceptor is not None:
+        stderr_interceptor.on_flush(callback)
 
 def setup_logger(log_level: str = 'INFO', capacity: int = 300):
     global logs
     if logs:
         return
 
+    # Override output streams and log to buffer
+    logs = deque(maxlen=capacity)
+
+    global stdout_interceptor
+    global stderr_interceptor
+    stdout_interceptor = sys.stdout = LogInterceptor(sys.stdout)
+    stderr_interceptor = sys.stderr = LogInterceptor(sys.stderr)
+
     # Setup default global logger
     logger = logging.getLogger()
     logger.setLevel(log_level)
 
     stream_handler = logging.StreamHandler()
     stream_handler.setFormatter(logging.Formatter("%(message)s"))
     logger.addHandler(stream_handler)
-
-    # Create a memory handler with a deque as its buffer
-    logs = deque(maxlen=capacity)
-    memory_handler = MemoryHandler(capacity, flushLevel=logging.INFO)
-    memory_handler.buffer = logs
-    memory_handler.setFormatter(formatter)
-    logger.addHandler(memory_handler)
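
The interception pattern above can be reproduced standalone, without ComfyUI: subclass io.TextIOWrapper around the real stdout buffer, record every write(), and hand the accumulated entries to callbacks on flush(). A self-contained sketch (all names here are illustrative, not from the commit):

import io
import sys
from datetime import datetime


class MiniInterceptor(io.TextIOWrapper):
    def __init__(self, stream):
        super().__init__(stream.buffer, encoding=stream.encoding,
                         line_buffering=stream.line_buffering)
        self.pending = []        # entries written since the last flush
        self.callbacks = []      # flush subscribers

    def write(self, data):
        self.pending.append({"t": datetime.now().isoformat(), "m": data})
        return super().write(data)

    def flush(self):
        super().flush()
        for cb in self.callbacks:
            cb(self.pending)
        self.pending = []


sys.stdout = MiniInterceptor(sys.stdout)
sys.stdout.callbacks.append(
    lambda entries: entries and sys.stderr.write(f"flushed {len(entries)} entries\n"))

print("hello world")   # goes through MiniInterceptor.write()
sys.stdout.flush()     # flushing hands the buffered entries to the callbacks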

server.py

Lines changed: 1 addition & 1 deletion
@@ -152,7 +152,7 @@ def __init__(self, loop):
         mimetypes.types_map['.js'] = 'application/javascript; charset=utf-8'
 
         self.user_manager = UserManager()
-        self.internal_routes = InternalRoutes()
+        self.internal_routes = InternalRoutes(self)
         self.supports = ["custom_nodes_from_web"]
         self.prompt_queue = None
         self.loop = loop

tests-unit/server/routes/internal_routes_test.py

Lines changed: 3 additions & 3 deletions
@@ -8,7 +8,7 @@
 
 @pytest.fixture
 def internal_routes():
-    return InternalRoutes()
+    return InternalRoutes(None)
 
 @pytest.fixture
 def aiohttp_client_factory(aiohttp_client, internal_routes):
@@ -102,7 +102,7 @@ async def test_file_service_initialization():
         # Create a mock instance
         mock_file_service_instance = MagicMock(spec=FileService)
         MockFileService.return_value = mock_file_service_instance
-        internal_routes = InternalRoutes()
+        internal_routes = InternalRoutes(None)
 
         # Check if FileService was initialized with the correct parameters
         MockFileService.assert_called_once_with({
@@ -112,4 +112,4 @@ async def test_file_service_initialization():
         })
 
         # Verify that the file_service attribute of InternalRoutes is set
-        assert internal_routes.file_service == mock_file_service_instance
+        assert internal_routes.file_service == mock_file_service_instance
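
A sketch of a test that could cover the new /logs/subscribe route (hypothetical, not part of this commit). It builds a bare aiohttp app from the route table rather than going through the fixtures above, mocks the prompt server with MagicMock, and swaps in a mock TerminalService so only the route wiring is exercised; async collection is assumed to come from the same pytest plugin the existing async tests rely on.

from unittest.mock import MagicMock

from aiohttp import web

from api_server.routes.internal.internal_routes import InternalRoutes


async def test_logs_subscribe_route(aiohttp_client):
    routes = InternalRoutes(MagicMock())
    routes.terminal_service = MagicMock()   # isolate the route from the real service

    routes.setup_routes()
    app = web.Application()
    app.add_routes(routes.routes)
    client = await aiohttp_client(app)

    resp = await client.patch("/logs/subscribe", json={"clientId": "abc", "enabled": True})
    assert resp.status == 200
    routes.terminal_service.subscribe.assert_called_once_with("abc")

    resp = await client.patch("/logs/subscribe", json={"clientId": "abc", "enabled": False})
    assert resp.status == 200
    routes.terminal_service.unsubscribe.assert_called_once_with("abc")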
