Skip to content

Sync #50

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jun 4, 2025
Merged

Sync #50

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 13 additions & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,19 @@ Please ensure all existing tests pass and, if you're adding a new feature or fix
```
4. **Open a pull request** from your fork to the `dev` branch of the `rmusser01/tldw_chatbook` repository.
5. In your pull request description, clearly explain the changes you've made and why. If it addresses an existing issue, link to it (e.g., "Fixes #123").
6. Be prepared to discuss your changes and make adjustments if requested by the maintainers.
6. Be prepared to discuss your changes and make adjustments if requested.
7. **Wait for review**: The project maintainers will review your pull request. They may request changes or approve it for merging.
8. All contributions must be made under the [Tiny Contributor License Agreement](https://github.com/indieopensource/tiny-cla/blob/main/cla.md). Please include the following text in your pull request description, along with your name in the proper location, indicating your acceptance of the Tiny Contributor License Agreement:

```
# indieopensource.com Tiny Contributor License Agreement

Development Version

I, {{{contributor name}}}, give Robert Musser permission to license my contributions on any terms they like. I am giving them this license in order to make it possible for them to accept my contributions into their project.

***As far as the law allows, my contributions come as is, without any warranty or condition, and I will not be liable to anyone for any damages related to this software or this license, under any kind of legal claim.***
```

## Project Architecture Overview (Simplified)

Expand Down
2 changes: 2 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ loguru
pydantic
pyyaml
httpx
pytest
emoji
# Chunk Library
#tqdm
#langdetect
Expand Down
8 changes: 4 additions & 4 deletions tldw_chatbook/Event_Handlers/chat_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -2581,7 +2581,6 @@ async def handle_stop_chat_generation_pressed(app: 'TldwCli') -> None:
loguru_logger.info("Cancellation for a streaming chat request initiated. Worker will handle stream termination.")
# For streaming, the worker itself should detect cancellation and stop sending StreamChunks.
# The on_stream_done event (with error or cancellation status) will then handle UI finalization.
# The on_worker_state_changed in app.py will also disable the button.

except Exception as e_cancel:
loguru_logger.error(f"Error during worker cancellation or UI update: {e_cancel}", exc_info=True)
Expand All @@ -2598,11 +2597,12 @@ async def handle_stop_chat_generation_pressed(app: 'TldwCli') -> None:
# The on_worker_state_changed handler will also try to disable it when the worker eventually stops.
# This provides immediate visual feedback.
try:
stop_button = app.query_one("#stop-chat-generation-button", Button)
stop_button = app.query_one("#stop-chat-generation", Button) # MODIFIED ID HERE
stop_button.disabled = True
loguru_logger.debug("Attempted to disable 'stop-chat-generation' button from handler.")
loguru_logger.debug("Attempted to disable '#stop-chat-generation' button from handler.")
except QueryError:
loguru_logger.error("Could not find 'stop-chat-generation-button' to disable it directly from handler.")
loguru_logger.error("Could not find '#stop-chat-generation' button to disable it directly from handler.") # MODIFIED ID IN LOG


async def populate_chat_conversation_character_filter_select(app: 'TldwCli') -> None:
"""Populates the character filter select in the Chat tab's conversation search."""
Expand Down
397 changes: 201 additions & 196 deletions tldw_chatbook/Event_Handlers/worker_events.py

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions tldw_chatbook/UI/Chat_Window.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,8 @@ def compose(self) -> ComposeResult:
yield Button(get_char(EMOJI_SEND, FALLBACK_SEND), id="send-chat", classes="send-button")
yield Button("💡", id="respond-for-me-button", classes="action-button suggest-button") # Suggest button
self.app_instance.loguru_logger.debug("ChatWindow: 'respond-for-me-button' composed.")
yield Button(get_char(EMOJI_STOP, FALLBACK_STOP), id="stop-chat-generation", classes="stop-button",
disabled=True)
#yield Button(get_char(EMOJI_STOP, FALLBACK_STOP), id="stop-chat-generation", classes="stop-button",
# disabled=True)
yield Button(get_char(EMOJI_CHARACTER_ICON, FALLBACK_CHARACTER_ICON), id="toggle-chat-right-sidebar",
classes="sidebar-toggle")

Expand Down
2 changes: 1 addition & 1 deletion tldw_chatbook/UI/Conv_Char_Window.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ def compose(self) -> ComposeResult:

# Conversation Details Collapsible
with Collapsible(title="Conversation Details", id="ccp-conversation-details-collapsible",
collapsed=False): # Start expanded
collapsed=True):
yield Static("Title:", classes="sidebar-label")
yield Input(id="conv-char-title-input", placeholder="Conversation title...", classes="sidebar-input")
yield Static("Keywords:", classes="sidebar-label")
Expand Down
82 changes: 67 additions & 15 deletions tldw_chatbook/Widgets/chat_message.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,12 @@
# Imports
#
# 3rd-party Libraries
import logging
from typing import Optional

from textual.app import ComposeResult
from textual.containers import Horizontal, Vertical
from textual.css.query import QueryError
from textual.widget import Widget
from textual.widgets import Static, Button, Label # Added Label
from textual.reactive import reactive
Expand Down Expand Up @@ -83,8 +85,9 @@ class ChatMessage(Widget):

# Store the raw text content
message_text = reactive("", repaint=True)
role = reactive("User", repaint=True) # "User" or "AI"
generation_complete = reactive(True) # Used for AI messages to show actions
role = reactive("User", repaint=True)
# Use an internal reactive to manage generation status and trigger UI updates
_generation_complete_internal = reactive(True)

# -- Internal state for message metadata ---
message_id_internal: reactive[Optional[str]] = reactive(None)
Expand All @@ -108,7 +111,7 @@ def __init__(self,
super().__init__(**kwargs)
self.message_text = message
self.role = role
self.generation_complete = generation_complete
self._generation_complete_internal = generation_complete

self.message_id_internal = message_id
self.message_version_internal = message_version
Expand All @@ -124,6 +127,11 @@ def __init__(self,
else: # Any role other than "user" (e.g., "AI", "Default Assistant", "Character Name") gets the -ai style
self.add_class("-ai")

@property
def generation_complete(self) -> bool:
"""Public property to access the generation status."""
return self._generation_complete_internal

def compose(self) -> ComposeResult:
with Vertical():
yield Label(f"{self.role}", classes="message-header")
Expand All @@ -133,7 +141,9 @@ def compose(self) -> ComposeResult:
# This should only apply if it's an AI message AND generation is not complete
if self.has_class("-ai") and not self.generation_complete:
actions_class += " -generating"
with Horizontal(classes=actions_class):

with Horizontal(classes=actions_class) as actions_bar:
actions_bar.id = f"actions-bar-{self.id or self.message_id_internal or 'new'}"
# Common buttons
yield Button("Edit", classes="action-button edit-button")
yield Button("📋", classes="action-button copy-button", id="copy") # Emoji for copy
Expand All @@ -144,25 +154,67 @@ def compose(self) -> ComposeResult:
yield Button("👍", classes="action-button thumb-up-button", id="thumb-up")
yield Button("👎", classes="action-button thumb-down-button", id="thumb-down")
yield Button("🔄", classes="action-button regenerate-button", id="regenerate") # Emoji for regenerate
if self.generation_complete: # Only show continue if generation is complete
yield Button("↪️", id="continue-response-button", classes="action-button continue-button")
# FIXME For some reason, the entire UI freezes when clicked...
#yield Button("↪️", id="continue-response-button", classes="action-button continue-button")

# Add delete button for all messages at very end
yield Button("🗑️", classes="action-button delete-button") # Emoji for delete ; Label: Delete, Class: delete-button

def update_message_chunk(self, chunk: str):
def watch__generation_complete_internal(self, complete: bool) -> None:
"""
Watcher for the internal generation status.
Updates the actions bar visibility and the continue button visibility for AI messages.
"""
if self.has_class("-ai"):
self.query_one(".message-text", Static).update(self.message_text + chunk)
self.message_text += chunk
try:
actions_container = self.query_one(".message-actions")
continue_button = self.query_one("#continue-response-button", Button)

if complete:
actions_container.remove_class("-generating") # Makes the bar visible via CSS
actions_container.styles.display = "block" # Ensures bar is visible
continue_button.display = True # Makes continue button visible
else:
# This state typically occurs during initialization if generation_complete=False
actions_container.add_class("-generating") # Hides the bar via CSS
# actions_container.styles.display = "none" # CSS rule should handle this
continue_button.display = False # Hides continue button
except QueryError as qe:
# This might happen if the query runs before the widget is fully composed or if it's being removed.
logging.debug(f"ChatMessage (ID: {self.id}, Role: {self.role}): QueryError in watch__generation_complete_internal: {qe}. Widget might not be fully ready or is not an AI message with these components.")
except Exception as e:
logging.error(f"Error in watch__generation_complete_internal for ChatMessage (ID: {self.id}): {e}", exc_info=True)
else: # Not an AI message
try: # Ensure continue button is hidden for non-AI messages if it somehow got queried
continue_button = self.query_one("#continue-response-button", Button)
continue_button.display = False
except QueryError:
pass # Expected for non-AI messages as the button isn't composed.


def mark_generation_complete(self):
"""
Marks the AI message generation as complete.
This will trigger the watcher for _generation_complete_internal to update UI.
"""
if self.has_class("-ai"):
self.generation_complete = True
actions_container = self.query_one(".message-actions")
actions_container.remove_class("-generating")
# Ensure it's displayed if CSS might still hide it via other means,
# though removing '-generating' should be enough if the CSS is specific.
actions_container.styles.display = "block" # or "flex" if it's a flex container
self._generation_complete_internal = True

def on_mount(self) -> None:
"""Ensure initial state of continue button and actions bar is correct after mounting."""
# Trigger the watcher logic based on the initial state.
self.watch__generation_complete_internal(self._generation_complete_internal)

def update_message_chunk(self, chunk: str):
"""Appends a chunk of text to an AI message during streaming."""
# This method is called by handle_streaming_chunk.
# The _generation_complete_internal should be False during streaming.
if self.has_class("-ai") and not self._generation_complete_internal:
# The static_text_widget.update is handled in handle_streaming_chunk
# This method primarily updates the internal message_text.
self.message_text += chunk
# If called at other times, ensure it doesn't break if static_text_widget not found.
# For streaming, handle_streaming_chunk updates the Static widget directly.

#
#
Expand Down
73 changes: 55 additions & 18 deletions tldw_chatbook/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -2431,35 +2431,72 @@ async def on_worker_state_changed(self, event: Worker.StateChanged) -> None:
f"Group='{worker_group}', State='{worker_state}', Desc='{worker_description}'"
)

# --- Handle Chat-related API Calls (e.g., API_Call_chat, API_Call_ccp, respond_for_me_worker) ---
# This section addresses the logic that was causing the crash.
# --- Handle Chat-related API Calls ---
if isinstance(worker_name_attr, str) and \
(worker_name_attr.startswith("API_Call_chat") or
worker_name_attr.startswith("API_Call_ccp") or
worker_name_attr == "respond_for_me_worker"):

self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' detected. State: {worker_state}.")
stop_button_id_selector = "#stop-chat-generation" # Correct ID selector

# Log RUNNING state as indicated by the original traceback context
if worker_state == WorkerState.RUNNING:
self.loguru_logger.info(f"Chat-related worker '{worker_name_attr}' is RUNNING.")
# Any specific UI updates for the RUNNING state of a chat worker would go here.
# (e.g., showing a thinking indicator, disabling input)
# This is often handled when the worker is initially started.
try:
# Enable the stop button
stop_button_widget = self.query_one(stop_button_id_selector, Button)
stop_button_widget.disabled = False
self.loguru_logger.info(f"Button '{stop_button_id_selector}' ENABLED.")
except QueryError:
self.loguru_logger.error(f"Could not find button '{stop_button_id_selector}' to enable it.")
# Note: The original code delegated SUCCESS/ERROR states.
# RUNNING state for chat workers was not explicitly handled here for the stop button.

# For SUCCESS or ERROR states, the logic is complex (updating UI, DB, etc.).
# Delegate to the specialized handler in worker_events.py.
if worker_state == WorkerState.SUCCESS or worker_state == WorkerState.ERROR:
self.loguru_logger.debug(
f"Delegating state {worker_state} for chat-related worker '{worker_name_attr}' to worker_handlers."
)
await worker_handlers.handle_api_call_worker_state_changed(self, event)
elif worker_state in [WorkerState.SUCCESS, WorkerState.ERROR, WorkerState.CANCELLED]:
self.loguru_logger.info(f"Chat-related worker '{worker_name_attr}' finished with state {worker_state}.")
try:
# Disable the stop button
stop_button_widget = self.query_one(stop_button_id_selector, Button)
stop_button_widget.disabled = True
self.loguru_logger.info(f"Button '{stop_button_id_selector}' DISABLED.")
except QueryError:
self.loguru_logger.error(f"Could not find button '{stop_button_id_selector}' to disable it.")

# If there are other states (like PENDING, CANCELLED) to handle directly for chat workers:
# elif worker_state == WorkerState.PENDING:
# self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' is PENDING.")
# else:
# self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' in unhandled state: {worker_state}")
# Existing delegation for SUCCESS/ERROR, which might update UI based on worker result.
# The worker_handlers.handle_api_call_worker_state_changed should focus on
# processing the worker's result/error, not managing the stop button's disabled state,
# as we are now handling it directly here.
if worker_state == WorkerState.SUCCESS or worker_state == WorkerState.ERROR:
self.loguru_logger.debug(
f"Delegating state {worker_state} for chat-related worker '{worker_name_attr}' to worker_handlers for result processing."
)
# This handler is responsible for updating the ChatMessage widget with the final response or error.
await worker_handlers.handle_api_call_worker_state_changed(self, event)

elif worker_state == WorkerState.CANCELLED:
self.loguru_logger.info(f"Worker '{worker_name_attr}' was cancelled.")
# The StreamDone event (if streaming) or the handle_stop_chat_generation_pressed
# (if non-streaming) should handle updating the AI message widget UI.
# We've already disabled the stop button above.
# If the StreamDone event doesn't appropriately update the current_ai_message_widget display
# for cancellations, some finalization logic might be needed here too.
# For now, assuming StreamDone or the stop handler manage the message UI.
if self.current_ai_message_widget and not self.current_ai_message_widget.generation_complete:
self.loguru_logger.debug("Finalizing AI message widget UI due to worker CANCELLED state.")
# Attempt to update the message UI to reflect cancellation if not already handled by StreamDone
try:
static_text_widget = self.current_ai_message_widget.query_one(".message-text", Static)
# Check if already updated by handle_stop_chat_generation_pressed for non-streaming
if "[italic]Chat generation cancelled by user.[/]" not in self.current_ai_message_widget.message_text:
self.current_ai_message_widget.message_text += "\n[italic](Stream Cancelled)[/]"
static_text_widget.update(Text.from_markup(self.current_ai_message_widget.message_text))

self.current_ai_message_widget.mark_generation_complete()
self.current_ai_message_widget = None # Clear ref
except QueryError as qe_cancel_ui:
self.loguru_logger.error(f"Error updating AI message UI on CANCELLED state: {qe_cancel_ui}")
else:
self.loguru_logger.debug(f"Chat-related worker '{worker_name_attr}' in other state: {worker_state}")

# --- Handle Llama.cpp Server Worker (identified by group) ---
# This handles the case where worker_name_attr was a list.
Expand Down
Loading