Update app.py
app.py CHANGED
@@ -13,24 +13,8 @@ from langgraph.types import RunnableConfig
 from pydantic import BaseModel
 from pathlib import Path
 
-
-import subprocess
-
-# def update_repo():
-#     try:
-#         subprocess.run(["git", "fetch", "origin"], check=True)
-#         subprocess.run(["git", "reset", "--hard", "origin/main"], check=True)
-#         subprocess.run([sys.executable, "-m", "pip", "install", "-r", "requirements.txt"], check=True)
-#         subprocess.run([sys.executable, "app.py"], check=True)
-#     except Exception as e:
-#         print(f"Git update failed: {e}")
-
-# update_repo()
-
 load_dotenv()
 
-
-
 # There are tools set here dependent on environment variables
 from graph import graph, weak_model, search_enabled  # noqa
 
@@ -55,10 +39,8 @@ def load_initial_greeting(filepath="greeting_prompt.txt") -> str:
         with open(filepath, "r", encoding="utf-8") as f:
             return f.read().strip()
     except FileNotFoundError:
-
-
-        print(f"Warning: Prompt file '{filepath}' not found. Using default.")
-        return "Welcome to the application! (Default Greeting)"
+        logger.warning(f"Warning: Prompt file '{filepath}' not found.")
+        return "Welcome to DIYO! I'm here to help you create amazing DIY projects. What would you like to build today?"
 
 async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid: UUID, prompt: str, search_enabled: bool, download_website_text_enabled: bool):
     """
@@ -72,12 +54,14 @@ async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid:
         str: The output message
         dict|Any: The final state of the graph
         bool|Any: Whether to trigger follow up questions
-
-    We do not use gradio history in the graph since we want the ToolMessage in the history
-    ordered properly. GraphProcessingState.messages is used as history instead
     """
     try:
-        logger.info(f"
+        logger.info(f"Processing user input: {user_input[:100]}...")
+
+        # Initialize input_graph_state if None
+        if input_graph_state is None:
+            input_graph_state = {}
+
         input_graph_state["tools_enabled"] = {
             "download_website_text": download_website_text_enabled,
             "tavily_search_results_json": search_enabled,
@@ -105,7 +89,7 @@ async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid:
         config = RunnableConfig(
             recursion_limit=20,
             run_name="user_chat",
-            configurable={"thread_id": uuid}
+            configurable={"thread_id": str(uuid)}
         )
 
         output: str = ""
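A note on the `str(uuid)` change above: `RunnableConfig` is a plain dict under the hood, and LangGraph checkpointers key saved conversation state by `thread_id`, which is conventionally a plain, serializable string rather than a `UUID` object. A minimal standalone sketch of the pattern (names illustrative):

    # Thread-scoped config for resuming a LangGraph conversation.
    from uuid import uuid4
    from langchain_core.runnables import RunnableConfig

    config = RunnableConfig(
        recursion_limit=20,
        run_name="user_chat",
        configurable={"thread_id": str(uuid4())},  # a plain string, not the UUID object
    )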
@@ -119,43 +103,43 @@ async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid:
         ):
             if stream_mode == "values":
                 final_state = chunk
-…
+                if chunk.get("messages") and len(chunk["messages"]) > 0:
+                    last_message = chunk["messages"][-1]
+                    if hasattr(last_message, "tool_calls") and last_message.tool_calls:
+                        for msg_tool_call in last_message.tool_calls:
+                            tool_name: str = msg_tool_call['name']
+
+                            if tool_name == "tavily_search_results_json":
+                                query = msg_tool_call['args']['query']
+                                waiting_output_seq.append(f"🔍 Searching for '{query}'...")
+                                yield "\n".join(waiting_output_seq), gr.skip(), gr.skip()
+
+                            elif tool_name == "download_website_text":
+                                url = msg_tool_call['args']['url']
+                                waiting_output_seq.append(f"📥 Downloading text from '{url}'...")
+                                yield "\n".join(waiting_output_seq), gr.skip(), gr.skip()
+
+                            elif tool_name == "human_assistance":
+                                query = msg_tool_call["args"]["query"]
+                                waiting_output_seq.append(f"🤖: {query}")
+
+                                # Save state to resume after user provides input
+                                final_state["awaiting_human_input"] = True
+                                final_state["human_assistance_tool_id"] = msg_tool_call["id"]
+
+                                # Indicate that human input is needed
+                                yield "\n".join(waiting_output_seq), final_state, True
+                                return  # Pause execution, resume in next call
+
+                            else:
+                                waiting_output_seq.append(f"🔧 Running {tool_name}...")
+                                yield "\n".join(waiting_output_seq), gr.skip(), gr.skip()
 
             elif stream_mode == "messages":
                 msg, metadata = chunk
-                #
-
-                if
+                # Check for the correct node name from your graph
+                node_name = metadata.get('langgraph_node', '')
+                if node_name in ["brainstorming_node", "prompt_planning_node", "generate_3d_node", "assistant_node"]:
                     current_chunk_text = ""
                     if isinstance(msg.content, str):
                         current_chunk_text = msg.content
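For context on the streaming shape this hunk consumes: when `stream_mode` is a list, LangGraph yields `(mode, payload)` pairs, where `"values"` carries the full state dict after each step and `"messages"` carries `(message_chunk, metadata)` with the producing node available under `metadata["langgraph_node"]`. A minimal consumer sketch (graph construction omitted; `graph`, `state`, and `config` are assumed to exist):

    # Consume a LangGraph stream that mixes state snapshots and LLM token chunks.
    async def consume(graph, state, config):
        final_state = None
        async for stream_mode, payload in graph.astream(
            state, config=config, stream_mode=["values", "messages"]
        ):
            if stream_mode == "values":
                final_state = payload          # full graph state after a step
            elif stream_mode == "messages":
                msg, metadata = payload        # token chunk + node metadata
                print(metadata.get("langgraph_node", ""), msg.content)
        return final_state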
@@ -163,24 +147,23 @@ async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid:
                     for block in msg.content:
                         if isinstance(block, dict) and block.get("type") == "text":
                             current_chunk_text += block.get("text", "")
-                    elif isinstance(block, str):
+                        elif isinstance(block, str):
                             current_chunk_text += block
 
-
-        if current_chunk_text: # Only add and yield if there's actually text
+                    if current_chunk_text:
                         output += current_chunk_text
                         yield output, gr.skip(), gr.skip()
 
-        #
-        # + store the graph state for next iteration
-        # yield output, dict(final_state), gr.skip()
+        # Final yield with complete response
         yield output + " ", dict(final_state), True
-
-
+
+    except Exception as e:
+        logger.exception("Exception occurred in chat_fn")
         user_error_message = "There was an error processing your request. Please try again."
         yield user_error_message, gr.skip(), False
 
 def clear():
+    """Clear the current conversation state"""
     return dict(), uuid4()
 
 class FollowupQuestions(BaseModel):
@@ -190,50 +173,55 @@ class FollowupQuestions(BaseModel):
 async def populate_followup_questions(end_of_chat_response: bool, messages: dict[str, str], uuid: UUID):
     """
     This function gets called a lot due to the asynchronous nature of streaming
-
     Only populate followup questions if streaming has completed and the message is coming from the assistant
     """
-    if not end_of_chat_response or not messages or messages
+    if not end_of_chat_response or not messages or len(messages) == 0:
         return *[gr.skip() for _ in range(FOLLOWUP_QUESTION_NUMBER)], False
-…
-    if len(follow_up_questions.questions) != FOLLOWUP_QUESTION_NUMBER:
-        raise ValueError("Invalid value of followup questions")
-    buttons = []
-    for i in range(FOLLOWUP_QUESTION_NUMBER):
-        buttons.append(
-            gr.Button(follow_up_questions.questions[i], visible=True, elem_classes="chat-tab"),
-        )
-
+
+    # Check if the last message is from assistant
+    if messages[-1]["role"] != "assistant":
+        return *[gr.skip() for _ in range(FOLLOWUP_QUESTION_NUMBER)], False
+
+    try:
+        config = RunnableConfig(
+            run_name="populate_followup_questions",
+            configurable={"thread_id": str(uuid)}
+        )
+        weak_model_with_config = weak_model.with_config(config)
+        follow_up_questions = await weak_model_with_config.with_structured_output(FollowupQuestions).ainvoke([
+            ("system", f"suggest {FOLLOWUP_QUESTION_NUMBER} followup questions for the user to ask the assistant. Refrain from asking personal questions."),
+            *messages,
+        ])
+
+        if len(follow_up_questions.questions) != FOLLOWUP_QUESTION_NUMBER:
+            logger.warning("Invalid number of followup questions generated")
+            return *[gr.Button(visible=False) for _ in range(FOLLOWUP_QUESTION_NUMBER)], False
+
+        buttons = []
+        for i in range(FOLLOWUP_QUESTION_NUMBER):
+            buttons.append(
+                gr.Button(follow_up_questions.questions[i], visible=True, elem_classes="chat-tab"),
+            )
+        return *buttons, False
+
+    except Exception as e:
+        logger.error(f"Error generating followup questions: {e}")
+        return *[gr.Button(visible=False) for _ in range(FOLLOWUP_QUESTION_NUMBER)], False
 
 async def summarize_chat(end_of_chat_response: bool, messages: dict, sidebar_summaries: dict, uuid: UUID):
     """Summarize chat for tab names"""
-    # print("\n------------------------")
-    # print("not end_of_chat_response", not end_of_chat_response)
-    # print("not messages", not messages)
-    # if messages:
-    #     print("messages[-1][role] != assistant", messages[-1]["role"] != "assistant")
-    # print("isinstance(sidebar_summaries, type(lambda x: x))", isinstance(sidebar_summaries, type(lambda x: x)))
-    # print("uuid in sidebar_summaries", uuid in sidebar_summaries)
     should_return = (
         not end_of_chat_response or
         not messages or
+        len(messages) == 0 or
         messages[-1]["role"] != "assistant" or
-        # This is a bug with gradio
         isinstance(sidebar_summaries, type(lambda x: x)) or
-        # Already created summary
         uuid in sidebar_summaries
     )
     if should_return:
         return gr.skip(), gr.skip()
 
+    # Filter valid messages
     filtered_messages = []
     for msg in messages:
         if isinstance(msg, dict) and msg.get("content") and msg["content"].strip():
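The rewritten helper above leans on LangChain's `with_structured_output` so the weak model returns a validated `FollowupQuestions` object instead of free text. A self-contained sketch of that call shape (works with any chat model that supports structured output; `model` is assumed):

    # Coerce a chat model's reply into a Pydantic schema.
    from pydantic import BaseModel

    class FollowupQuestions(BaseModel):
        questions: list[str]

    async def suggest_followups(model, messages, n=3):
        structured_model = model.with_structured_output(FollowupQuestions)
        result = await structured_model.ainvoke([
            ("system", f"suggest {n} followup questions for the user to ask the assistant."),
            *messages,
        ])
        return result.questions  # validated list[str]; the caller still checks the length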
@@ -242,15 +230,14 @@ async def summarize_chat(end_of_chat_response: bool, messages: dict, sidebar_summaries: dict, uuid: UUID):
     # If we don't have any valid messages after filtering, provide a default summary
     if not filtered_messages:
         if uuid not in sidebar_summaries:
-            sidebar_summaries[uuid] = "Chat
+            sidebar_summaries[uuid] = "New Chat"
         return sidebar_summaries, False
 
-
-    config = RunnableConfig(
-        run_name="summarize_chat",
-        configurable={"thread_id": uuid}
-    )
     try:
+        config = RunnableConfig(
+            run_name="summarize_chat",
+            configurable={"thread_id": str(uuid)}
+        )
         weak_model_with_config = weak_model.with_config(config)
         summary_response = await weak_model_with_config.ainvoke([
             ("system", "summarize this chat in 7 tokens or less. Refrain from using periods"),
@@ -258,104 +245,102 @@ async def summarize_chat(end_of_chat_response: bool, messages: dict, sidebar_summaries: dict, uuid: UUID):
         ])
 
         if uuid not in sidebar_summaries:
-            sidebar_summaries[uuid] = summary_response.content
+            sidebar_summaries[uuid] = summary_response.content[:50]  # Limit length
+
     except Exception as e:
         logger.error(f"Error summarizing chat: {e}")
-        # Provide a fallback summary if an error occurs
         if uuid not in sidebar_summaries:
-            sidebar_summaries[uuid] = "
+            sidebar_summaries[uuid] = "Chat Session"
 
     return sidebar_summaries, False
 
 async def new_tab(uuid, gradio_graph, messages, tabs, prompt, sidebar_summaries):
+    """Create a new chat tab"""
     new_uuid = uuid4()
     new_graph = {}
-…
-        "prompt": prompt,
-    }
-    suggestion_buttons = []
-    for _ in range(FOLLOWUP_QUESTION_NUMBER):
-        suggestion_buttons.append(gr.Button(visible=False))
-    new_messages = {}
-
-    # --- MODIFICATION FOR GREETING IN EVERY NEW CHAT ---
-    greeting_text = load_initial_greeting() # Get the greeting
-    # `gr.Chatbot` expects a list of tuples or list of dicts.
-    # For `type="messages"`, it's list of dicts: [{"role": "assistant", "content": "Hello"}]
-    # Or list of tuples: [(None, "Hello")]
-    # Let's assume your chatbot is configured for list of tuples (None, bot_message) for initial messages
-    new_chat_messages_for_display = [{"role": "assistant", "content": greeting_text}]
-    # If your chat_interface.chatbot_value expects list of dicts:
-    # new_messages_history = [{"role": "assistant", "content": greeting_text}]
-    # --- END MODIFICATION ---
-
-    new_prompt = "You are a helpful assistant."
-    return new_uuid, new_graph, new_chat_messages_for_display, tabs, new_prompt, sidebar_summaries, *suggestion_buttons
-
-def switch_tab(selected_uuid, tabs, gradio_graph, uuid, messages, prompt):
-    # I don't know of another way to lookup uuid other than
-    # by the button value
-
-    # Save current state
-    if messages:
+
+    # Save current tab if it has content
+    if messages and len(messages) > 0:
+        if uuid not in sidebar_summaries:
+            sidebar_summaries, _ = await summarize_chat(True, messages, sidebar_summaries, uuid)
         tabs[uuid] = {
             "graph": gradio_graph,
             "messages": messages,
-            "prompt": prompt
+            "prompt": prompt,
         }
-
-…
+
+    # Clear suggestion buttons
+    suggestion_buttons = [gr.Button(visible=False) for _ in range(FOLLOWUP_QUESTION_NUMBER)]
+
+    # Load initial greeting for new chat
+    greeting_text = load_initial_greeting()
+    new_chat_messages_for_display = [{"role": "assistant", "content": greeting_text}]
+
+    new_prompt = prompt if prompt else "You are a helpful DIY assistant."
+
+    return new_uuid, new_graph, new_chat_messages_for_display, tabs, new_prompt, sidebar_summaries, *suggestion_buttons
+
+def switch_tab(selected_uuid, tabs, gradio_graph, uuid, messages, prompt):
+    """Switch to a different chat tab"""
+    try:
+        # Save current state if there are messages
+        if messages and len(messages) > 0:
+            tabs[uuid] = {
+                "graph": gradio_graph if gradio_graph else {},
+                "messages": messages,
+                "prompt": prompt
+            }
+
+        if selected_uuid not in tabs:
+            logger.error(f"Could not find the selected tab in tabs storage: {selected_uuid}")
+            return gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), *[gr.Button(visible=False) for _ in range(FOLLOWUP_QUESTION_NUMBER)]
+
+        selected_tab_state = tabs[selected_uuid]
+        selected_graph = selected_tab_state.get("graph", {})
+        selected_messages = selected_tab_state.get("messages", [])
+        selected_prompt = selected_tab_state.get("prompt", "You are a helpful DIY assistant.")
+
+        suggestion_buttons = [gr.Button(visible=False) for _ in range(FOLLOWUP_QUESTION_NUMBER)]
+
+        return selected_graph, selected_uuid, selected_messages, tabs, selected_prompt, *suggestion_buttons
+
+    except Exception as e:
+        logger.error(f"Error switching tabs: {e}")
+        return gr.skip(), gr.skip(), gr.skip(), gr.skip(), gr.skip(), *[gr.Button(visible=False) for _ in range(FOLLOWUP_QUESTION_NUMBER)]
 
 def delete_tab(current_chat_uuid, selected_uuid, sidebar_summaries, tabs):
+    """Delete a chat tab"""
     output_messages = gr.skip()
+
+    # If deleting the current tab, clear the chatbot
     if current_chat_uuid == selected_uuid:
-        output_messages =
+        output_messages = []
+
+    # Remove from storage
     if selected_uuid in tabs:
         del tabs[selected_uuid]
     if selected_uuid in sidebar_summaries:
         del sidebar_summaries[selected_uuid]
+
     return sidebar_summaries, tabs, output_messages
 
 def submit_edit_tab(selected_uuid, sidebar_summaries, text):
-
+    """Submit edited tab name"""
+    if text.strip():
+        sidebar_summaries[selected_uuid] = text.strip()[:50]  # Limit length
     return sidebar_summaries, ""
 
 def load_mesh(mesh_file_name):
-
-
-def display_initial_greeting(is_new_user_state_value: bool):
-    """
-    Determines if a greeting should be displayed and returns the UI updates.
-    It also returns the new state for 'is_new_user_for_greeting'.
-    """
-    if is_new_user_state_value:
-        greeting_message_text = load_initial_greeting()
-        # For a chatbot, the history is a list of tuples: [(user_msg, bot_msg)]
-        # For an initial message from the bot, user_msg is None.
-        initial_chat_history = [(None, greeting_message_text)]
-        updated_is_new_user_flag = False # Greeting shown, so set to False
-        return initial_chat_history, updated_is_new_user_flag
-    else:
-        # Not a new user (or already greeted), so no initial message in chat history
-        # and the flag remains False.
-        return [], False
+    """Load a 3D mesh file"""
+    return mesh_file_name
 
 def get_sorted_3d_model_examples():
+    """Get sorted list of 3D model examples"""
     examples_dir = Path("./generated_3d_models")
+
+    # Create directory if it doesn't exist
+    examples_dir.mkdir(exist_ok=True)
+
     if not examples_dir.exists():
         return []
 
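A note on why `new_tab`, `switch_tab`, and `delete_tab` shuttle plain dicts around: the offloaded tabs live in `gr.BrowserState`, so every stored value has to survive serialization into browser storage. The record shape they round-trip, as a standalone sketch (default prompt string taken from this file):

    # Shape of one offloaded tab record, keyed by chat UUID.
    # Everything stays serializable so gr.BrowserState can persist it.
    def save_tab(tabs, uuid, graph_state, messages, prompt):
        tabs[uuid] = {"graph": graph_state, "messages": messages, "prompt": prompt}
        return tabs

    def restore_tab(tabs, uuid, default_prompt="You are a helpful DIY assistant."):
        record = tabs.get(uuid, {})
        return (
            record.get("graph", {}),
            record.get("messages", []),
            record.get("prompt", default_prompt),
        )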
@@ -366,16 +351,19 @@ def get_sorted_3d_model_examples():
     ]
 
     # Sort files by creation time (latest first)
-…
+    try:
+        sorted_files = sorted(
+            model_files,
+            key=lambda x: x.stat().st_ctime,
+            reverse=True
+        )
+    except (OSError, AttributeError):
+        # Fallback to name sorting if stat fails
+        sorted_files = sorted(model_files, key=lambda x: x.name, reverse=True)
 
     # Convert to format [[path1], [path2], ...]
     return [[str(file)] for file in sorted_files]
 
-
 CSS = """
 footer {visibility: hidden}
 .followup-question-button {font-size: 12px }
@@ -404,119 +392,92 @@ footer {visibility: hidden}
 }
 
 #main-app {
-    height: 4600px;
-    overflow-y: auto;
-    padding-top:
+    height: 4600px;
+    overflow-y: auto;
+    padding-top: 20px;
 }
-
 """
 
-# We set the ChatInterface textbox id to chat-textbox for this to work
 TRIGGER_CHATINTERFACE_BUTTON = """
 function triggerChatButtonClick() {
-
-    // Find the div with id "chat-textbox"
     const chatTextbox = document.getElementById("chat-textbox");
-
     if (!chatTextbox) {
         console.error("Error: Could not find element with id 'chat-textbox'");
         return;
     }
-
-    // Find the button that is a descendant of the div
     const button = chatTextbox.querySelector("button");
-
     if (!button) {
         console.error("Error: No button found inside the chat-textbox element");
        return;
     }
-
-    // Trigger the click event
     button.click();
 }"""
 
-
-
-TOGGLE_SIDEBAR_JS = """
-function toggleSidebarVisibility() {
-    console.log("Called the side bar funnction");
-    const sidebar = document.querySelector(".sidebar svelte-7y53u7 open");
-    if (!sidebar) {
-        console.error("Error: Could not find the sidebar element");
-        return;
-    }
-    sidebar.classList.toggle("sidebar-collapsed");
-}
-"""
-
 if __name__ == "__main__":
-    logger.info("Starting the interface")
-
+    logger.info("Starting the DIYO interface")
+
+    with gr.Blocks(title="DIYO - DIY Assistant", fill_height=True, css=CSS, elem_id="main-app") as demo:
+        # State management
         is_new_user_for_greeting = gr.State(True)
-
+
         current_prompt_state = gr.BrowserState(
+            value="You are a helpful DIY assistant.",
             storage_key="current_prompt_state",
             secret=BROWSER_STORAGE_SECRET,
         )
         current_uuid_state = gr.BrowserState(
-            uuid4,
+            value=uuid4,
             storage_key="current_uuid_state",
             secret=BROWSER_STORAGE_SECRET,
         )
         current_langgraph_state = gr.BrowserState(
-            dict
+            value=dict,
            storage_key="current_langgraph_state",
            secret=BROWSER_STORAGE_SECRET,
         )
-        end_of_assistant_response_state = gr.State(
-
-        )
+        end_of_assistant_response_state = gr.State(False)
+
         # [uuid] -> summary of chat
         sidebar_names_state = gr.BrowserState(
-            dict
+            value=dict,
             storage_key="sidebar_names_state",
             secret=BROWSER_STORAGE_SECRET,
         )
         # [uuid] -> {"graph": gradio_graph, "messages": messages}
         offloaded_tabs_data_storage = gr.BrowserState(
-            dict
+            value=dict,
             storage_key="offloaded_tabs_data_storage",
             secret=BROWSER_STORAGE_SECRET,
         )
-
         chatbot_message_storage = gr.BrowserState(
-
+            value=list,
             storage_key="chatbot_message_storage",
             secret=BROWSER_STORAGE_SECRET,
         )
 
+        # Header
         with gr.Row(elem_classes="header-margin"):
-            # Add the decorated header with ASCII art
             gr.Markdown("""
             <div style="display: flex; align-items: center; justify-content: center; text-align: center; padding: 20px; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); border-radius: 15px; margin-bottom: 20px; color: white; box-shadow: 0 4px 15px rgba(0,0,0,0.2);">
-
-            ╔══════════════════════════════════════════════════════════════════════════════════════════════╗
-            ║                                                                                              ║
-            ║   █████╗ ██████╗ ███████╗███╗ ██╗████████╗ ██████╗ ██╗██╗ ██╗ ██████╗                        ║
-            ║   ██╔══██╗██╔════╝ ██╔════╝████╗ ██║╚══██╔══╝ ██╔══██╗██║╚██╗ ██╔╝██╔═══██╗                  ║
-            ║   ███████║██║ ███╗█████╗ ██╔██╗ ██║ ██║ ██║ ██║██║ ╚████╔╝ ██║ ██║                          ║
-            ║   ██╔══██║██║ ██║██╔══╝ ██║╚██╗██║ ██║ ██║ ██║██║ ╚██╔╝ ██║ ██║                             ║
-            ║   ██║ ██║╚██████╔╝███████╗██║ ╚████║ ██║ ██████╔╝██║ ██║ ╚██████╔╝                          ║
-            ║   ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═══╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝                             ║
-            ║                                                                                              ║
-            ╚══════════════════════════════════════════════════════════════════════════════════════════════╝
-
-            Let's build things, break boundaries with the help of AI!
+                <h1>🔧 DIYO - Your DIY Assistant 🛠️</h1>
             </div>
             """)
 
+        # System prompt input
         with gr.Row():
-            prompt_textbox = gr.Textbox(
-…
+            prompt_textbox = gr.Textbox(
+                label="System Prompt",
+                value="You are a helpful DIY assistant.",
+                show_label=True,
+                interactive=True,
+                placeholder="Enter custom system prompt..."
+            )
 
+        # Tool settings
         with gr.Row():
             checkbox_search_enabled = gr.Checkbox(
                 value=True,
-                label="Enable search",
+                label="Enable web search",
                 show_label=True,
                 visible=search_enabled,
                 scale=1,
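The `value=` additions above matter because `gr.BrowserState` persists its value in the browser's local storage under `storage_key`, protected with `secret`, so it survives page reloads; passing a callable such as `dict` or `uuid4` (as this file does) produces a fresh default per new session instead of one shared mutable object. A minimal sketch, assuming the secret is defined elsewhere in app.py:

    # Browser-persisted state: survives page reloads for the same browser.
    import gradio as gr
    from uuid import uuid4

    BROWSER_STORAGE_SECRET = "change-me"  # placeholder; real value lives elsewhere

    with gr.Blocks() as demo:
        current_uuid_state = gr.BrowserState(
            value=uuid4,                       # callable default, evaluated per session
            storage_key="current_uuid_state",  # key in the browser's local storage
            secret=BROWSER_STORAGE_SECRET,     # protects the stored value
        )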
@@ -524,100 +485,122 @@ if __name__ == "__main__":
         checkbox_download_website_text = gr.Checkbox(
             value=True,
             show_label=True,
-            label="Enable downloading text from
+            label="Enable downloading text from URLs",
             scale=1,
         )
+
+        # 3D Model display and controls
         with gr.Row():
             with gr.Column(scale=2):
                 model_3d_output = gr.Model3D(
                     clear_color=[0.0, 0.0, 0.0, 0.0],
-                    label="3D Model",
-                    height=400
+                    label="3D Model Viewer",
+                    height=400
                 )
             with gr.Column(scale=1):
-                # Input for the 3D model
-                # Using UploadButton is often clearer for users than a clickable Model3D input
                 model_3d_upload_button = gr.UploadButton(
-                    "Upload 3D Model (.obj, .glb, .gltf)",
+                    "📁 Upload 3D Model (.obj, .glb, .gltf)",
                     file_types=[".obj", ".glb", ".gltf"],
-                    # scale=0 # make it take less space if needed
                 )
                 model_3d_upload_button.upload(
                     fn=load_mesh,
                     inputs=model_3d_upload_button,
                     outputs=model_3d_output
                 )
-…
+
+        # Examples with error handling
+        try:
+            examples_list = get_sorted_3d_model_examples()
+            if examples_list:
+                gr.Examples(
+                    label="Example 3D Models",
+                    examples=examples_list,
+                    inputs=model_3d_upload_button,
+                    outputs=model_3d_output,
+                    fn=load_mesh,
+                    cache_examples=False
+                )
+        except Exception as e:
+            logger.error(f"Error setting up 3D model examples: {e}")
 
+        # Chat interface setup
+        with gr.Row():
+            multimodal = False
+            textbox_component = gr.MultimodalTextbox if multimodal else gr.Textbox
+
             textbox = textbox_component(
-…
+                show_label=False,
+                label="Message",
+                placeholder="Type a message...",
+                scale=1,
+                autofocus=True,
+                submit_btn=True,
+                stop_btn=True,
+                elem_id="chat-textbox",
+                lines=1,
+            )
             chatbot = gr.Chatbot(
-…
-            with gr.Row():
-                followup_question_buttons = []
-                for i in range(FOLLOWUP_QUESTION_NUMBER):
-                    btn = gr.Button(f"Button {i+1}", visible=False)
-                    followup_question_buttons.append(btn)
-
-
+                type="messages",
+                scale=0,
+                show_copy_button=True,
+                height=400,
+                editable="all",
+                elem_classes="main-chatbox"
+            )
 
-
             )
-
+        # Follow-up question buttons
+        with gr.Row():
+            followup_question_buttons = []
+            for i in range(FOLLOWUP_QUESTION_NUMBER):
+                btn = gr.Button(f"Button {i+1}", visible=False, elem_classes="followup-question-button")
+                followup_question_buttons.append(btn)
+
+        # Tab management state
+        tab_edit_uuid_state = gr.State("")
+
+        # Update prompt state when changed
+        prompt_textbox.change(
+            fn=lambda prompt: prompt,
+            inputs=[prompt_textbox],
+            outputs=[current_prompt_state]
+        )
+
+        # Sidebar with chat history
         with gr.Sidebar() as sidebar:
             @gr.render(inputs=[tab_edit_uuid_state, end_of_assistant_response_state, sidebar_names_state, current_uuid_state, chatbot, offloaded_tabs_data_storage])
             def render_chats(tab_uuid_edit, end_of_chat_response, sidebar_summaries, active_uuid, messages, tabs):
-
-                if
-…
+                # Ensure sidebar_summaries is a dict
+                if not isinstance(sidebar_summaries, dict):
+                    sidebar_summaries = {}
+
+                # Current tab button
+                current_tab_button_text = sidebar_summaries.get(active_uuid, "Current Chat")
+                if active_uuid not in tabs or not tabs[active_uuid]:
                     unique_id = f"current-tab-{active_uuid}-{uuid4()}"
                     gr.Button(
                         current_tab_button_text,
                         elem_classes=["chat-tab", "active"],
-                        elem_id=unique_id
+                        elem_id=unique_id
                     )
+
+                # Historical tabs
                 for chat_uuid, tab in reversed(tabs.items()):
+                    if not tab:  # Skip empty tabs
+                        continue
+
                     elem_classes = ["chat-tab"]
                     if chat_uuid == active_uuid:
                         elem_classes.append("active")
+
                     button_uuid_state = gr.State(chat_uuid)
+
                     with gr.Row():
+                        # Delete button
                         clear_tab_button = gr.Button(
                             "🗑",
                             scale=0,
                             elem_classes=["tab-button-control"],
-                            elem_id=f"delete-btn-{chat_uuid}-{uuid4()}"
+                            elem_id=f"delete-btn-{chat_uuid}-{uuid4()}"
                         )
                         clear_tab_button.click(
                             fn=delete_tab,
@@ -630,27 +613,31 @@ if __name__ == "__main__":
                             outputs=[
                                 sidebar_names_state,
                                 offloaded_tabs_data_storage,
-
+                                chatbot
                             ]
                         )
-
-
-…
+
+                        # Tab name/edit functionality
+                        chat_button_text = sidebar_summaries.get(chat_uuid, str(chat_uuid)[:8])
+
                         if chat_uuid != tab_uuid_edit:
+                            # Edit button
                             set_edit_tab_button = gr.Button(
                                 "✎",
                                 scale=0,
                                 elem_classes=["tab-button-control"],
-                                elem_id=f"edit-btn-{chat_uuid}-{uuid4()}"
+                                elem_id=f"edit-btn-{chat_uuid}-{uuid4()}"
                             )
                             set_edit_tab_button.click(
                                 fn=lambda x: x,
                                 inputs=[button_uuid_state],
                                 outputs=[tab_edit_uuid_state]
                             )
+
+                            # Tab button
                             chat_tab_button = gr.Button(
                                 chat_button_text,
-                                elem_id=f"chat-{chat_uuid}-{uuid4()}",
+                                elem_id=f"chat-{chat_uuid}-{uuid4()}",
                                 elem_classes=elem_classes,
                                 scale=2
                             )
@@ -667,19 +654,20 @@ if __name__ == "__main__":
                                 outputs=[
                                     current_langgraph_state,
                                     current_uuid_state,
-
+                                    chatbot,
                                     offloaded_tabs_data_storage,
                                     prompt_textbox,
                                     *followup_question_buttons
                                 ]
                             )
                         else:
+                            # Edit textbox
                             chat_tab_text = gr.Textbox(
                                 chat_button_text,
                                 scale=2,
                                 interactive=True,
                                 show_label=False,
-                                elem_id=f"edit-text-{chat_uuid}-{uuid4()}"
+                                elem_id=f"edit-text-{chat_uuid}-{uuid4()}"
                             )
                             chat_tab_text.submit(
                                 fn=submit_edit_tab,
@@ -693,11 +681,17 @@ if __name__ == "__main__":
                                     tab_edit_uuid_state
                                 ]
                             )
-
-
-            new_chat_button = gr.Button("New Chat", elem_id="new-chat-button")
-            chatbot.clear(fn=clear, outputs=[current_langgraph_state, current_uuid_state])
+
+            # New chat button
+            new_chat_button = gr.Button("➕ New Chat", elem_id="new-chat-button")
 
+            # Clear functionality
+            chatbot.clear(
+                fn=clear,
+                outputs=[current_langgraph_state, current_uuid_state]
+            )
+
+        # Main chat interface
         chat_interface = gr.ChatInterface(
             chatbot=chatbot,
             fn=chat_fn,
@@ -717,6 +711,7 @@ if __name__ == "__main__":
             textbox=textbox,
         )
 
+        # New chat button functionality
         new_chat_button.click(
             new_tab,
             inputs=[
@@ -730,7 +725,7 @@ if __name__ == "__main__":
             outputs=[
                 current_uuid_state,
                 current_langgraph_state,
-
+                chatbot,
                 offloaded_tabs_data_storage,
                 prompt_textbox,
                 sidebar_names_state,
@@ -738,12 +733,11 @@ if __name__ == "__main__":
             ]
         )
 
-
+        # Follow-up button functionality
         def click_followup_button(btn):
             buttons = [gr.Button(visible=False) for _ in range(len(followup_question_buttons))]
             return btn, *buttons
 
-
         for btn in followup_question_buttons:
             btn.click(
                 fn=click_followup_button,
@@ -754,6 +748,7 @@ if __name__ == "__main__":
             ]
         ).success(lambda: None, js=TRIGGER_CHATINTERFACE_BUTTON)
 
+        # Event handlers for chatbot changes
        chatbot.change(
             fn=populate_followup_questions,
             inputs=[
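The follow-up buttons work in two stages: the Python handler copies the clicked button's label into the textbox and hides all suggestion buttons, and the chained `.success(..., js=...)` then clicks the submit button from JavaScript once the Python side has finished. A reduced sketch of the chaining (selector and ids illustrative, not from this repo):

    # Chain a JS action to run only after a Python handler succeeds.
    import gradio as gr

    CLICK_SUBMIT_JS = "() => document.querySelector('#chat-textbox button')?.click()"

    with gr.Blocks() as demo:
        textbox = gr.Textbox(elem_id="chat-textbox", submit_btn=True)
        suggestion = gr.Button("Ask a follow-up")
        suggestion.click(
            fn=lambda label: label,   # a Button used as an input passes its label
            inputs=[suggestion],
            outputs=[textbox],
        ).success(lambda: None, js=CLICK_SUBMIT_JS)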
@@ -767,6 +762,7 @@ if __name__ == "__main__":
             ],
             trigger_mode="multiple"
         )
+
         chatbot.change(
             fn=summarize_chat,
             inputs=[
@@ -781,6 +777,7 @@ if __name__ == "__main__":
             ],
             trigger_mode="multiple"
         )
+
         chatbot.change(
             fn=lambda x: x,
             inputs=[chatbot],
@@ -788,68 +785,43 @@ if __name__ == "__main__":
             trigger_mode="always_last"
         )
 
-
-
-…
-            ],
-            outputs=[
-                chatbot_message_storage, # Update the stored messages with the greeting
-                is_new_user_for_greeting # Update the flag
-            ]
+        # Load event handlers
+        @demo.load(
+            inputs=[is_new_user_for_greeting, chatbot_message_storage],
+            outputs=[chatbot_message_storage, is_new_user_for_greeting]
         )
         def handle_initial_greeting_load(current_is_new_user_flag: bool, existing_chat_history: list):
-            """
-            This function is called by the @app.load decorator above.
-            It decides whether to add a greeting to the chat history.
-            """
-            # You can either put the logic directly here, or call the globally defined one.
-            # Option 1: Call the globally defined function (cleaner if it's complex)
-            # Make sure 'display_initial_greeting_on_load' is defined globally in your app.py
-            # For this example, I'm assuming 'display_initial_greeting_on_load' is the one we defined earlier:
-            # def display_initial_greeting_on_load(current_is_new_user_flag: bool, existing_chat_history: list):
-            #     if current_is_new_user_flag:
-            #         greeting_message_text = load_initial_greeting() # from graph.py
-            #         greeting_entry = (None, greeting_message_text)
-            #         if not isinstance(existing_chat_history, list): existing_chat_history = []
-            #         updated_chat_history = [greeting_entry] + existing_chat_history
-            #         updated_is_new_user_flag = False
-            #         logger.info("Greeting added for new user.")
-            #         return updated_chat_history, updated_is_new_user_flag
-            #     else:
-            #         logger.info("Not a new user or already greeted, no greeting added.")
-            #         return existing_chat_history, False
-            #
-            # return display_initial_greeting_on_load(current_is_new_user_flag, existing_chat_history)
-
-            # Option 2: Put logic directly here (if simple enough)
+            """Handle initial greeting when the app loads"""
             if current_is_new_user_flag:
-                greeting_message_text = load_initial_greeting()
+                greeting_message_text = load_initial_greeting()
                 greeting_entry = {"role": "assistant", "content": greeting_message_text}
-
+
                 if not isinstance(existing_chat_history, list):
                     existing_chat_history = []
+
                 updated_chat_history = [greeting_entry] + existing_chat_history
                 updated_is_new_user_flag = False
-                logger.info("Greeting added for new user
+                logger.info("Greeting added for new user.")
                 return updated_chat_history, updated_is_new_user_flag
             else:
-                logger.info("Not a new user or already greeted
+                logger.info("Not a new user or already greeted.")
+                if not isinstance(existing_chat_history, list):
+                    existing_chat_history = []
                 return existing_chat_history, False
 
-        @demo.load(inputs=[chatbot_message_storage], outputs=[
+        @demo.load(inputs=[chatbot_message_storage], outputs=[chatbot])
         def load_messages(messages):
-
+            """Load stored messages into chatbot"""
+            if isinstance(messages, list):
+                return messages
+            return []
 
         @demo.load(inputs=[current_prompt_state], outputs=[prompt_textbox])
         def load_prompt(current_prompt):
-
-
-
-
-
-            demo.launch(debug=True, share=True)
-
-
-
+            """Load stored prompt"""
+            if current_prompt:
+                return current_prompt
+            return "You are a helpful DIY assistant."
+
+    # Launch the application
+    demo.launch(debug=True, share=True)
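The `@demo.load` handlers at the end all fire when a browser session connects, which is how the persisted chat history and saved prompt repopulate the UI after a reload. A condensed sketch of the load-and-restore step (storage key and secret are placeholders):

    # Restore persisted chat history into the visible chatbot on page load.
    import gradio as gr

    with gr.Blocks() as demo:
        chatbot_message_storage = gr.BrowserState(
            value=list, storage_key="chatbot_message_storage", secret="change-me"
        )
        chatbot = gr.Chatbot(type="messages")

        @demo.load(inputs=[chatbot_message_storage], outputs=[chatbot])
        def load_messages(messages):
            # Browser storage may hold a non-list on first visit; be defensive.
            return messages if isinstance(messages, list) else []

    demo.launch()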