Update app.py
app.py CHANGED
```diff
@@ -70,11 +70,13 @@ def load_initial_greeting(filepath="greeting_prompt.txt") -> str:
 
 async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid: UUID, prompt: str, search_enabled: bool, download_website_text_enabled: bool):
     """
+    Chat function that handles both 'messages' and 'tuples' format for compatibility
+
     Args:
         user_input (str): The user's input message
-        history (dict): The history of the conversation in gradio
+        history (dict): The history of the conversation in gradio (format depends on chatbot type)
         input_graph_state (dict): The current state of the graph. This includes tool call history
-        uuid (UUID): The unique identifier for the current conversation
+        uuid (UUID): The unique identifier for the current conversation
         prompt (str): The system prompt
     Yields:
         str: The output message
```
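For context, these are the two Gradio history shapes the updated docstring refers to; the values below are illustrative only:

```python
# 'messages' format: a list of role/content dicts
history_messages = [
    {"role": "user", "content": "How do I patch drywall?"},
    {"role": "assistant", "content": "Start by cleaning the edges of the hole."},
]

# Older 'tuples' format: (user_message, assistant_message) pairs
history_tuples = [
    ("How do I patch drywall?", "Start by cleaning the edges of the hole."),
]
```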
```diff
@@ -188,6 +190,56 @@ async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid:
         user_error_message = "There was an error processing your request. Please try again."
         yield user_error_message, gr.skip(), False
 
+
+def convert_to_tuples_format(messages_list):
+    """Convert messages format to tuples format for older Gradio versions"""
+    if not isinstance(messages_list, list):
+        return []
+
+    tuples = []
+    user_msg = None
+
+    for msg in messages_list:
+        if isinstance(msg, dict):
+            role = msg.get("role", "")
+            content = msg.get("content", "")
+
+            if role == "user":
+                user_msg = content
+            elif role == "assistant":
+                if user_msg is not None:
+                    tuples.append((user_msg, content))
+                    user_msg = None
+                else:
+                    # Assistant message without user message, add empty user message
+                    tuples.append((None, content))
+        elif isinstance(msg, tuple) and len(msg) == 2:
+            # Already in tuple format
+            tuples.append(msg)
+
+    # If there's a hanging user message, add it with empty assistant response
+    if user_msg is not None:
+        tuples.append((user_msg, ""))
+
+    return tuples
+
+
+def convert_from_tuples_format(tuples_list):
+    """Convert tuples format to messages format"""
+    if not isinstance(tuples_list, list):
+        return []
+
+    messages = []
+    for item in tuples_list:
+        if isinstance(item, tuple) and len(item) == 2:
+            user_msg, assistant_msg = item
+            if user_msg:
+                messages.append({"role": "user", "content": user_msg})
+            if assistant_msg:
+                messages.append({"role": "assistant", "content": assistant_msg})
+
+    return messages
+
 def clear():
     """Clear the current conversation state"""
     return dict(), uuid4()
```
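A minimal round trip through the two helpers added above, assuming they are in scope (for example, run below their definitions in app.py); expected values are shown in comments:

```python
messages = [
    {"role": "user", "content": "How do I hang a shelf?"},
    {"role": "assistant", "content": "Locate the studs first."},
]

tuples = convert_to_tuples_format(messages)
# [("How do I hang a shelf?", "Locate the studs first.")]

restored = convert_from_tuples_format(tuples)
# [{"role": "user", "content": "How do I hang a shelf?"},
#  {"role": "assistant", "content": "Locate the studs first."}]

assert restored == messages
```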
```diff
@@ -204,8 +256,14 @@ async def populate_followup_questions(end_of_chat_response: bool, messages: dict
     if not end_of_chat_response or not messages or len(messages) == 0:
         return *[gr.skip() for _ in range(FOLLOWUP_QUESTION_NUMBER)], False
 
+    # Convert tuples format to messages format if needed
+    if isinstance(messages, list) and len(messages) > 0:
+        if isinstance(messages[0], tuple):
+            # Convert from tuples to messages format
+            messages = convert_from_tuples_format(messages)
+
     # Check if the last message is from assistant
-    if messages[-1]["role"] != "assistant":
+    if not messages or (isinstance(messages[-1], dict) and messages[-1].get("role") != "assistant"):
         return *[gr.skip() for _ in range(FOLLOWUP_QUESTION_NUMBER)], False
 
     try:
```
```diff
@@ -240,13 +298,21 @@ async def summarize_chat(end_of_chat_response: bool, messages: dict, sidebar_sum
         not end_of_chat_response or
         not messages or
         len(messages) == 0 or
-        messages[-1]["role"] != "assistant" or
         isinstance(sidebar_summaries, type(lambda x: x)) or
         uuid in sidebar_summaries
     )
     if should_return:
         return gr.skip(), gr.skip()
 
+    # Convert tuples format to messages format if needed
+    if isinstance(messages, list) and len(messages) > 0:
+        if isinstance(messages[0], tuple):
+            messages = convert_from_tuples_format(messages)
+
+    # Check if the last message is from assistant
+    if not messages or (isinstance(messages[-1], dict) and messages[-1].get("role") != "assistant"):
+        return gr.skip(), gr.skip()
+
     # Filter valid messages
     filtered_messages = []
     for msg in messages:
```
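The tuples-to-messages guard above now appears in both `populate_followup_questions` and `summarize_chat`. A small helper could factor it out; this is a hypothetical sketch that reuses `convert_from_tuples_format` from the diff, not code from the commit:

```python
def normalize_history(messages):
    """Return chat history in messages format, whatever the chatbot type produced."""
    if isinstance(messages, list) and messages and isinstance(messages[0], tuple):
        return convert_from_tuples_format(messages)
    return messages if isinstance(messages, list) else []


def ends_with_assistant(messages) -> bool:
    """True when the most recent entry is an assistant message."""
    messages = normalize_history(messages)
    return bool(messages) and isinstance(messages[-1], dict) and messages[-1].get("role") == "assistant"
```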
```diff
@@ -300,7 +366,25 @@ async def new_tab(uuid, gradio_graph, messages, tabs, prompt, sidebar_summaries)
 
     # Load initial greeting for new chat
     greeting_text = load_initial_greeting()
-
+
+    # Determine format based on current chatbot configuration
+    # Check if we're using tuples format (older Gradio) or messages format
+    try:
+        # Try to detect the format from existing messages
+        uses_tuples_format = True
+        if messages and len(messages) > 0:
+            if isinstance(messages[0], dict) and "role" in messages[0]:
+                uses_tuples_format = False
+
+        if uses_tuples_format:
+            new_chat_messages_for_display = [(None, greeting_text)]
+        else:
+            new_chat_messages_for_display = [{"role": "assistant", "content": greeting_text}]
+
+    except Exception as e:
+        logger.warning(f"Error determining chat format: {e}")
+        # Default to tuples format for older Gradio
+        new_chat_messages_for_display = [(None, greeting_text)]
 
     new_prompt = prompt if prompt else "You are a helpful DIY assistant."
 
```
```diff
@@ -461,6 +545,18 @@ if __name__ == "__main__":
     else:
         print("💾 Using persistent browser state (data persists after refresh)")
 
+    # Log available Gradio components for debugging
+    available_components = []
+    for attr_name in dir(gr):
+        if attr_name[0].isupper() and not attr_name.startswith('_'):
+            available_components.append(attr_name)
+
+    logger.info(f"Available Gradio components: {len(available_components)} components detected")
+    key_components = ['ChatInterface', 'Sidebar', 'BrowserState', 'MultimodalTextbox']
+    for component in key_components:
+        status = "✅" if hasattr(gr, component) else "❌"
+        logger.info(f"  {status} {component}")
+
     print() # Add spacing
 
     with gr.Blocks(title="DIYO - DIY Assistant", fill_height=True, css=CSS, elem_id="main-app") as demo:
```
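The same `hasattr` probe can be captured once and reused when deciding which UI pieces to build. A hypothetical sketch (the `GRADIO_FEATURES` name is not from the commit):

```python
import gradio as gr

# Capability map built with the same hasattr() probe used in the logging block above.
GRADIO_FEATURES = {
    name: hasattr(gr, name)
    for name in ("ChatInterface", "Sidebar", "BrowserState", "MultimodalTextbox")
}


def has_feature(name: str) -> bool:
    """Return True if the installed Gradio build exposes the named component."""
    return GRADIO_FEATURES.get(name, False)
```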
```diff
@@ -639,7 +735,7 @@ if __name__ == "__main__":
                 chatbot_kwargs["type"] = "messages"
                 logger.info("Using 'messages' type for chatbot")
             else:
-                logger.warning("Chatbot 'type' parameter not supported,
+                logger.warning("Chatbot 'type' parameter not supported, will use deprecated 'tuples' format")
 
             # Check if 'show_copy_button' parameter is supported
             if 'show_copy_button' in init_params:
```
```diff
@@ -651,12 +747,8 @@ if __name__ == "__main__":
 
         except Exception as e:
             logger.warning(f"Error checking Chatbot parameters: {e}")
-            # Use minimal parameters as fallback
+            # Use minimal parameters as fallback
             chatbot_kwargs = {"height": 400}
-            try:
-                chatbot_kwargs["type"] = "messages"
-            except:
-                pass
 
         chatbot = gr.Chatbot(**chatbot_kwargs)
 
```
```diff
@@ -812,41 +904,120 @@ if __name__ == "__main__":
         # Clear functionality - implement manually since chatbot.clear() is not available in older Gradio versions
         # We'll handle clearing through the clear chat button instead
 
-        # Main chat interface - with compatibility checks
+        # Main chat interface - with extensive compatibility checks
+        # Start with minimal required parameters
         chat_interface_kwargs = {
             "chatbot": chatbot,
             "fn": chat_fn,
-            "additional_inputs": [
-                current_langgraph_state,
-                current_uuid_state,
-                prompt_textbox,
-                checkbox_search_enabled,
-                checkbox_download_website_text,
-            ],
-            "additional_outputs": [
-                current_langgraph_state,
-                end_of_assistant_response_state
-            ],
             "textbox": textbox,
         }
 
         # Check if newer ChatInterface parameters are supported
         try:
             init_params = gr.ChatInterface.__init__.__code__.co_varnames
+            logger.info(f"ChatInterface supported parameters: {list(init_params)}")
+
+            # Check each parameter individually
+            if 'additional_inputs' in init_params:
+                chat_interface_kwargs["additional_inputs"] = [
+                    current_langgraph_state,
+                    current_uuid_state,
+                    prompt_textbox,
+                    checkbox_search_enabled,
+                    checkbox_download_website_text,
+                ]
+                logger.info("Added additional_inputs to ChatInterface")
+
+            if 'additional_outputs' in init_params:
+                chat_interface_kwargs["additional_outputs"] = [
+                    current_langgraph_state,
+                    end_of_assistant_response_state
+                ]
+                logger.info("Added additional_outputs to ChatInterface")
+            else:
+                logger.warning("ChatInterface 'additional_outputs' not supported - some features may be limited")
 
             # Check if 'type' parameter is supported
             if 'type' in init_params:
                 chat_interface_kwargs["type"] = "messages"
+                logger.info("Added type='messages' to ChatInterface")
 
             # Check if 'multimodal' parameter is supported
             if 'multimodal' in init_params:
                 chat_interface_kwargs["multimodal"] = multimodal
+                logger.info(f"Added multimodal={multimodal} to ChatInterface")
 
         except Exception as e:
             logger.warning(f"Error checking ChatInterface parameters: {e}")
             # Keep minimal parameters as fallback
 
-
+        # Try to create ChatInterface with compatibility handling
+        try:
+            chat_interface = gr.ChatInterface(**chat_interface_kwargs)
+            logger.info("ChatInterface created successfully")
+        except TypeError as e:
+            logger.error(f"ChatInterface creation failed: {e}")
+            logger.info("Falling back to minimal ChatInterface configuration")
+
+            # Fallback to absolute minimal configuration
+            try:
+                minimal_kwargs = {
+                    "chatbot": chatbot,
+                    "fn": lambda message, history: (message + " (processed)", history + [(message, message + " (processed)")]),
+                    "textbox": textbox,
+                }
+                chat_interface = gr.ChatInterface(**minimal_kwargs)
+                logger.warning("Using minimal ChatInterface - advanced features disabled")
+            except Exception as fallback_error:
+                logger.error(f"Even minimal ChatInterface failed: {fallback_error}")
+                # Create manual chat functionality as last resort
+                chat_interface = None
+                logger.info("Creating manual chat interface as fallback")
+
+                # Manual chat submit function
+                def manual_chat_submit(message, history, graph_state, uuid_val, prompt, search_enabled, download_enabled):
+                    """Manual chat submission when ChatInterface is not available"""
+                    try:
+                        if not message.strip():
+                            return history, "", graph_state
+
+                        # Add user message
+                        if not isinstance(history, list):
+                            history = []
+
+                        # Use tuples format for older Gradio
+                        history.append((message, "Processing..."))
+
+                        # TODO: Integrate with your actual graph processing here
+                        # For now, provide a simple response
+                        response = f"Manual chat mode: {message} (ChatInterface not available in this Gradio version)"
+
+                        # Update the last tuple with the response
+                        if history:
+                            history[-1] = (message, response)
+
+                        return history, "", graph_state
+                    except Exception as e:
+                        logger.error(f"Error in manual chat: {e}")
+                        if not isinstance(history, list):
+                            history = []
+                        history.append((message, f"Error: {str(e)}"))
+                        return history, "", graph_state
+
+                # Set up manual chat button
+                textbox.submit(
+                    fn=manual_chat_submit,
+                    inputs=[
+                        textbox,
+                        chatbot,
+                        current_langgraph_state,
+                        current_uuid_state,
+                        prompt_textbox,
+                        checkbox_search_enabled,
+                        checkbox_download_website_text
+                    ],
+                    outputs=[chatbot, textbox, current_langgraph_state]
+                )
 
         # New chat button functionality
         new_chat_button.click(
```
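The commit probes supported keyword arguments via `gr.ChatInterface.__init__.__code__.co_varnames`; `inspect.signature` expresses the same idea and also honours `**kwargs`. A hypothetical alternative sketch, not part of the diff:

```python
import inspect

import gradio as gr


def filter_supported_kwargs(component_cls, candidate_kwargs: dict) -> dict:
    """Drop kwargs that this Gradio version's constructor does not accept."""
    params = inspect.signature(component_cls.__init__).parameters
    if any(p.kind is inspect.Parameter.VAR_KEYWORD for p in params.values()):
        return dict(candidate_kwargs)  # constructor takes **kwargs, pass everything through
    return {k: v for k, v in candidate_kwargs.items() if k in params}


# e.g. only forward type="messages" when this ChatInterface version knows about it
safe_kwargs = filter_supported_kwargs(gr.ChatInterface, {"type": "messages"})
```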
```diff
@@ -894,15 +1065,34 @@ if __name__ == "__main__":
             buttons = [gr.Button(visible=False) for _ in range(len(followup_question_buttons))]
             return btn, *buttons
 
-
-
-
-
-
-
-
-
-
+        # Handle followup buttons based on whether ChatInterface is available
+        if chat_interface is not None:
+            for btn in followup_question_buttons:
+                try:
+                    btn.click(
+                        fn=click_followup_button,
+                        inputs=[btn],
+                        outputs=[
+                            chat_interface.textbox if hasattr(chat_interface, 'textbox') else textbox,
+                            *followup_question_buttons
+                        ]
+                    ).success(lambda: None, js=TRIGGER_CHATINTERFACE_BUTTON)
+                except Exception as e:
+                    logger.warning(f"Error setting up followup button: {e}")
+                    # Fallback to basic button functionality
+                    btn.click(
+                        fn=click_followup_button,
+                        inputs=[btn],
+                        outputs=[textbox, *followup_question_buttons]
+                    )
+        else:
+            logger.warning("ChatInterface not available - followup buttons will have limited functionality")
+            for btn in followup_question_buttons:
+                btn.click(
+                    fn=click_followup_button,
+                    inputs=[btn],
+                    outputs=[textbox, *followup_question_buttons]
+                )
 
         # Event handlers for chatbot changes - with compatibility checks
         def setup_change_handler(fn, inputs, outputs, trigger_mode=None):
```
```diff
@@ -970,10 +1160,17 @@ if __name__ == "__main__":
             """Handle initial greeting when the app loads"""
             if current_is_new_user_flag:
                 greeting_message_text = load_initial_greeting()
-                greeting_entry = {"role": "assistant", "content": greeting_message_text}
 
                 if not isinstance(existing_chat_history, list):
                     existing_chat_history = []
+
+                # Detect format and add greeting accordingly
+                if existing_chat_history and isinstance(existing_chat_history[0], tuple):
+                    # Tuples format
+                    greeting_entry = (None, greeting_message_text)
+                else:
+                    # Messages format
+                    greeting_entry = {"role": "assistant", "content": greeting_message_text}
 
                 updated_chat_history = [greeting_entry] + existing_chat_history
                 updated_is_new_user_flag = False
```
```diff
@@ -1004,7 +1201,8 @@ if __name__ == "__main__":
         def load_initial_greeting():
             """Load initial greeting for users without BrowserState"""
             greeting_text = load_initial_greeting()
-
+            # Use tuples format for older Gradio versions without BrowserState
+            return [(None, greeting_text)]
 
     # Launch the application
     demo.launch(debug=True, share=True)
```