Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 6cd3be6

Browse files
authored Jun 4, 2025
Merge pull request #50 from rmusser01/dev
Sync
2 parents 35ff559 + b035d5a commit 6cd3be6

File tree

8 files changed

+345
-237
lines changed

8 files changed

+345
-237
lines changed
 

CONTRIBUTING.md

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,19 @@ Please ensure all existing tests pass and, if you're adding a new feature or fix
7272
```
7373
4. **Open a pull request** from your fork to the `dev` branch of the `rmusser01/tldw_chatbook` repository.
7474
5. In your pull request description, clearly explain the changes you've made and why. If it addresses an existing issue, link to it (e.g., "Fixes #123").
75-
6. Be prepared to discuss your changes and make adjustments if requested by the maintainers.
75+
6. Be prepared to discuss your changes and make adjustments if requested.
76+
7. **Wait for review**: The project maintainers will review your pull request. They may request changes or approve it for merging.
77+
8. All contributions must be made under the [Tiny Contributor License Agreement](https://github.com/indieopensource/tiny-cla/blob/main/cla.md). Please include the following text in your pull request description, along with your name in the proper location, indicating your acceptance of the Tiny Contributor License Agreement:
78+
79+
```
80+
# indieopensource.com Tiny Contributor License Agreement
81+
82+
Development Version
83+
84+
I, {{{contributor name}}}, give Robert Musser permission to license my contributions on any terms they like. I am giving them this license in order to make it possible for them to accept my contributions into their project.
85+
86+
***As far as the law allows, my contributions come as is, without any warranty or condition, and I will not be liable to anyone for any damages related to this software or this license, under any kind of legal claim.***
87+
```
7688

7789
## Project Architecture Overview (Simplified)
7890

requirements.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@ loguru
1010
pydantic
1111
pyyaml
1212
httpx
13+
pytest
14+
emoji
1315
# Chunk Library
1416
#tqdm
1517
#langdetect

tldw_chatbook/Event_Handlers/chat_events.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2581,7 +2581,6 @@ async def handle_stop_chat_generation_pressed(app: 'TldwCli') -> None:
25812581
loguru_logger.info("Cancellation for a streaming chat request initiated. Worker will handle stream termination.")
25822582
# For streaming, the worker itself should detect cancellation and stop sending StreamChunks.
25832583
# The on_stream_done event (with error or cancellation status) will then handle UI finalization.
2584-
# The on_worker_state_changed in app.py will also disable the button.
25852584

25862585
except Exception as e_cancel:
25872586
loguru_logger.error(f"Error during worker cancellation or UI update: {e_cancel}", exc_info=True)
@@ -2598,11 +2597,12 @@ async def handle_stop_chat_generation_pressed(app: 'TldwCli') -> None:
25982597
# The on_worker_state_changed handler will also try to disable it when the worker eventually stops.
25992598
# This provides immediate visual feedback.
26002599
try:
2601-
stop_button = app.query_one("#stop-chat-generation-button", Button)
2600+
stop_button = app.query_one("#stop-chat-generation", Button) # MODIFIED ID HERE
26022601
stop_button.disabled = True
2603-
loguru_logger.debug("Attempted to disable 'stop-chat-generation' button from handler.")
2602+
loguru_logger.debug("Attempted to disable '#stop-chat-generation' button from handler.")
26042603
except QueryError:
2605-
loguru_logger.error("Could not find 'stop-chat-generation-button' to disable it directly from handler.")
2604+
loguru_logger.error("Could not find '#stop-chat-generation' button to disable it directly from handler.") # MODIFIED ID IN LOG
2605+
26062606

26072607
async def populate_chat_conversation_character_filter_select(app: 'TldwCli') -> None:
26082608
"""Populates the character filter select in the Chat tab's conversation search."""

tldw_chatbook/Event_Handlers/worker_events.py

Lines changed: 201 additions & 196 deletions
Large diffs are not rendered by default.

tldw_chatbook/UI/Chat_Window.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,8 +48,8 @@ def compose(self) -> ComposeResult:
4848
yield Button(get_char(EMOJI_SEND, FALLBACK_SEND), id="send-chat", classes="send-button")
4949
yield Button("💡", id="respond-for-me-button", classes="action-button suggest-button") # Suggest button
5050
self.app_instance.loguru_logger.debug("ChatWindow: 'respond-for-me-button' composed.")
51-
yield Button(get_char(EMOJI_STOP, FALLBACK_STOP), id="stop-chat-generation", classes="stop-button",
52-
disabled=True)
51+
#yield Button(get_char(EMOJI_STOP, FALLBACK_STOP), id="stop-chat-generation", classes="stop-button",
52+
# disabled=True)
5353
yield Button(get_char(EMOJI_CHARACTER_ICON, FALLBACK_CHARACTER_ICON), id="toggle-chat-right-sidebar",
5454
classes="sidebar-toggle")
5555

tldw_chatbook/UI/Conv_Char_Window.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ def compose(self) -> ComposeResult:
123123

124124
# Conversation Details Collapsible
125125
with Collapsible(title="Conversation Details", id="ccp-conversation-details-collapsible",
126-
collapsed=False): # Start expanded
126+
collapsed=True):
127127
yield Static("Title:", classes="sidebar-label")
128128
yield Input(id="conv-char-title-input", placeholder="Conversation title...", classes="sidebar-input")
129129
yield Static("Keywords:", classes="sidebar-label")

tldw_chatbook/Widgets/chat_message.py

Lines changed: 67 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,12 @@
44
# Imports
55
#
66
# 3rd-party Libraries
7+
import logging
78
from typing import Optional
89

910
from textual.app import ComposeResult
1011
from textual.containers import Horizontal, Vertical
12+
from textual.css.query import QueryError
1113
from textual.widget import Widget
1214
from textual.widgets import Static, Button, Label # Added Label
1315
from textual.reactive import reactive
@@ -83,8 +85,9 @@ class ChatMessage(Widget):
8385

8486
# Store the raw text content
8587
message_text = reactive("", repaint=True)
86-
role = reactive("User", repaint=True) # "User" or "AI"
87-
generation_complete = reactive(True) # Used for AI messages to show actions
88+
role = reactive("User", repaint=True)
89+
# Use an internal reactive to manage generation status and trigger UI updates
90+
_generation_complete_internal = reactive(True)
8891

8992
# -- Internal state for message metadata ---
9093
message_id_internal: reactive[Optional[str]] = reactive(None)
@@ -108,7 +111,7 @@ def __init__(self,
108111
super().__init__(**kwargs)
109112
self.message_text = message
110113
self.role = role
111-
self.generation_complete = generation_complete
114+
self._generation_complete_internal = generation_complete
112115

113116
self.message_id_internal = message_id
114117
self.message_version_internal = message_version
@@ -124,6 +127,11 @@ def __init__(self,
124127
else: # Any role other than "user" (e.g., "AI", "Default Assistant", "Character Name") gets the -ai style
125128
self.add_class("-ai")
126129

130+
@property
131+
def generation_complete(self) -> bool:
132+
"""Public property to access the generation status."""
133+
return self._generation_complete_internal
134+
127135
def compose(self) -> ComposeResult:
128136
with Vertical():
129137
yield Label(f"{self.role}", classes="message-header")
@@ -133,7 +141,9 @@ def compose(self) -> ComposeResult:
133141
# This should only apply if it's an AI message AND generation is not complete
134142
if self.has_class("-ai") and not self.generation_complete:
135143
actions_class += " -generating"
136-
with Horizontal(classes=actions_class):
144+
145+
with Horizontal(classes=actions_class) as actions_bar:
146+
actions_bar.id = f"actions-bar-{self.id or self.message_id_internal or 'new'}"
137147
# Common buttons
138148
yield Button("Edit", classes="action-button edit-button")
139149
yield Button("📋", classes="action-button copy-button", id="copy") # Emoji for copy
@@ -144,25 +154,67 @@ def compose(self) -> ComposeResult:
144154
yield Button("👍", classes="action-button thumb-up-button", id="thumb-up")
145155
yield Button("👎", classes="action-button thumb-down-button", id="thumb-down")
146156
yield Button("🔄", classes="action-button regenerate-button", id="regenerate") # Emoji for regenerate
147-
if self.generation_complete: # Only show continue if generation is complete
148-
yield Button("↪️", id="continue-response-button", classes="action-button continue-button")
157+
# FIXME For some reason, the entire UI freezes when clicked...
158+
#yield Button("↪️", id="continue-response-button", classes="action-button continue-button")
149159

150160
# Add delete button for all messages at very end
151161
yield Button("🗑️", classes="action-button delete-button") # Emoji for delete ; Label: Delete, Class: delete-button
152162

153-
def update_message_chunk(self, chunk: str):
163+
def watch__generation_complete_internal(self, complete: bool) -> None:
164+
"""
165+
Watcher for the internal generation status.
166+
Updates the actions bar visibility and the continue button visibility for AI messages.
167+
"""
154168
if self.has_class("-ai"):
155-
self.query_one(".message-text", Static).update(self.message_text + chunk)
156-
self.message_text += chunk
169+
try:
170+
actions_container = self.query_one(".message-actions")
171+
continue_button = self.query_one("#continue-response-button", Button)
172+
173+
if complete:
174+
actions_container.remove_class("-generating") # Makes the bar visible via CSS
175+
actions_container.styles.display = "block" # Ensures bar is visible
176+
continue_button.display = True # Makes continue button visible
177+
else:
178+
# This state typically occurs during initialization if generation_complete=False
179+
actions_container.add_class("-generating") # Hides the bar via CSS
180+
# actions_container.styles.display = "none" # CSS rule should handle this
181+
continue_button.display = False # Hides continue button
182+
except QueryError as qe:
183+
# This might happen if the query runs before the widget is fully composed or if it's being removed.
184+
logging.debug(f"ChatMessage (ID: {self.id}, Role: {self.role}): QueryError in watch__generation_complete_internal: {qe}. Widget might not be fully ready or is not an AI message with these components.")
185+
except Exception as e:
186+
logging.error(f"Error in watch__generation_complete_internal for ChatMessage (ID: {self.id}): {e}", exc_info=True)
187+
else: # Not an AI message
188+
try: # Ensure continue button is hidden for non-AI messages if it somehow got queried
189+
continue_button = self.query_one("#continue-response-button", Button)
190+
continue_button.display = False
191+
except QueryError:
192+
pass # Expected for non-AI messages as the button isn't composed.
193+
157194

158195
def mark_generation_complete(self):
196+
"""
197+
Marks the AI message generation as complete.
198+
This will trigger the watcher for _generation_complete_internal to update UI.
199+
"""
159200
if self.has_class("-ai"):
160-
self.generation_complete = True
161-
actions_container = self.query_one(".message-actions")
162-
actions_container.remove_class("-generating")
163-
# Ensure it's displayed if CSS might still hide it via other means,
164-
# though removing '-generating' should be enough if the CSS is specific.
165-
actions_container.styles.display = "block" # or "flex" if it's a flex container
201+
self._generation_complete_internal = True
202+
203+
def on_mount(self) -> None:
204+
"""Ensure initial state of continue button and actions bar is correct after mounting."""
205+
# Trigger the watcher logic based on the initial state.
206+
self.watch__generation_complete_internal(self._generation_complete_internal)
207+
208+
def update_message_chunk(self, chunk: str):
209+
"""Appends a chunk of text to an AI message during streaming."""
210+
# This method is called by handle_streaming_chunk.
211+
# The _generation_complete_internal should be False during streaming.
212+
if self.has_class("-ai") and not self._generation_complete_internal:
213+
# The static_text_widget.update is handled in handle_streaming_chunk
214+
# This method primarily updates the internal message_text.
215+
self.message_text += chunk
216+
# If called at other times, ensure it doesn't break if static_text_widget not found.
217+
# For streaming, handle_streaming_chunk updates the Static widget directly.
166218

167219
#
168220
#

tldw_chatbook/app.py

Lines changed: 55 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -2431,35 +2431,72 @@ async def on_worker_state_changed(self, event: Worker.StateChanged) -> None:
24312431
f"Group='{worker_group}', State='{worker_state}', Desc='{worker_description}'"
24322432
)
24332433

2434-
# --- Handle Chat-related API Calls (e.g., API_Call_chat, API_Call_ccp, respond_for_me_worker) ---
2435-
# This section addresses the logic that was causing the crash.
2434+
# --- Handle Chat-related API Calls ---
24362435
if isinstance(worker_name_attr, str) and \
24372436
(worker_name_attr.startswith("API_Call_chat") or
24382437
worker_name_attr.startswith("API_Call_ccp") or
24392438
worker_name_attr == "respond_for_me_worker"):
24402439

24412440
self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' detected. State: {worker_state}.")
2441+
stop_button_id_selector = "#stop-chat-generation" # Correct ID selector
24422442

2443-
# Log RUNNING state as indicated by the original traceback context
24442443
if worker_state == WorkerState.RUNNING:
24452444
self.loguru_logger.info(f"Chat-related worker '{worker_name_attr}' is RUNNING.")
2446-
# Any specific UI updates for the RUNNING state of a chat worker would go here.
2447-
# (e.g., showing a thinking indicator, disabling input)
2448-
# This is often handled when the worker is initially started.
2445+
try:
2446+
# Enable the stop button
2447+
stop_button_widget = self.query_one(stop_button_id_selector, Button)
2448+
stop_button_widget.disabled = False
2449+
self.loguru_logger.info(f"Button '{stop_button_id_selector}' ENABLED.")
2450+
except QueryError:
2451+
self.loguru_logger.error(f"Could not find button '{stop_button_id_selector}' to enable it.")
2452+
# Note: The original code delegated SUCCESS/ERROR states.
2453+
# RUNNING state for chat workers was not explicitly handled here for the stop button.
24492454

2450-
# For SUCCESS or ERROR states, the logic is complex (updating UI, DB, etc.).
2451-
# Delegate to the specialized handler in worker_events.py.
2452-
if worker_state == WorkerState.SUCCESS or worker_state == WorkerState.ERROR:
2453-
self.loguru_logger.debug(
2454-
f"Delegating state {worker_state} for chat-related worker '{worker_name_attr}' to worker_handlers."
2455-
)
2456-
await worker_handlers.handle_api_call_worker_state_changed(self, event)
2455+
elif worker_state in [WorkerState.SUCCESS, WorkerState.ERROR, WorkerState.CANCELLED]:
2456+
self.loguru_logger.info(f"Chat-related worker '{worker_name_attr}' finished with state {worker_state}.")
2457+
try:
2458+
# Disable the stop button
2459+
stop_button_widget = self.query_one(stop_button_id_selector, Button)
2460+
stop_button_widget.disabled = True
2461+
self.loguru_logger.info(f"Button '{stop_button_id_selector}' DISABLED.")
2462+
except QueryError:
2463+
self.loguru_logger.error(f"Could not find button '{stop_button_id_selector}' to disable it.")
24572464

2458-
# If there are other states (like PENDING, CANCELLED) to handle directly for chat workers:
2459-
# elif worker_state == WorkerState.PENDING:
2460-
# self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' is PENDING.")
2461-
# else:
2462-
# self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' in unhandled state: {worker_state}")
2465+
# Existing delegation for SUCCESS/ERROR, which might update UI based on worker result.
2466+
# The worker_handlers.handle_api_call_worker_state_changed should focus on
2467+
# processing the worker's result/error, not managing the stop button's disabled state,
2468+
# as we are now handling it directly here.
2469+
if worker_state == WorkerState.SUCCESS or worker_state == WorkerState.ERROR:
2470+
self.loguru_logger.debug(
2471+
f"Delegating state {worker_state} for chat-related worker '{worker_name_attr}' to worker_handlers for result processing."
2472+
)
2473+
# This handler is responsible for updating the ChatMessage widget with the final response or error.
2474+
await worker_handlers.handle_api_call_worker_state_changed(self, event)
2475+
2476+
elif worker_state == WorkerState.CANCELLED:
2477+
self.loguru_logger.info(f"Worker '{worker_name_attr}' was cancelled.")
2478+
# The StreamDone event (if streaming) or the handle_stop_chat_generation_pressed
2479+
# (if non-streaming) should handle updating the AI message widget UI.
2480+
# We've already disabled the stop button above.
2481+
# If the StreamDone event doesn't appropriately update the current_ai_message_widget display
2482+
# for cancellations, some finalization logic might be needed here too.
2483+
# For now, assuming StreamDone or the stop handler manage the message UI.
2484+
if self.current_ai_message_widget and not self.current_ai_message_widget.generation_complete:
2485+
self.loguru_logger.debug("Finalizing AI message widget UI due to worker CANCELLED state.")
2486+
# Attempt to update the message UI to reflect cancellation if not already handled by StreamDone
2487+
try:
2488+
static_text_widget = self.current_ai_message_widget.query_one(".message-text", Static)
2489+
# Check if already updated by handle_stop_chat_generation_pressed for non-streaming
2490+
if "[italic]Chat generation cancelled by user.[/]" not in self.current_ai_message_widget.message_text:
2491+
self.current_ai_message_widget.message_text += "\n[italic](Stream Cancelled)[/]"
2492+
static_text_widget.update(Text.from_markup(self.current_ai_message_widget.message_text))
2493+
2494+
self.current_ai_message_widget.mark_generation_complete()
2495+
self.current_ai_message_widget = None # Clear ref
2496+
except QueryError as qe_cancel_ui:
2497+
self.loguru_logger.error(f"Error updating AI message UI on CANCELLED state: {qe_cancel_ui}")
2498+
else:
2499+
self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' in other state: {worker_state}")
24632500

24642501
# --- Handle Llama.cpp Server Worker (identified by group) ---
24652502
# This handles the case where worker_name_attr was a list.

0 commit comments

Comments
 (0)
Please sign in to comment.