muzakkirhussain011 committed
Commit b3af596 · 1 Parent(s): 1aef247

Add application files

Files changed (1)
  1. app.py +214 -147
app.py CHANGED
@@ -1279,9 +1279,9 @@ After the tool completes, provide a summary of:
 # AI CHAT - With MCP Tool Support
 # ============================================================================
 async def chat_with_ai_async(message: str, history: list, hf_token: str):
-    """AI Chat with full MCP tool support - runs as async generator"""
+    """AI Chat powered by LLM with full MCP tool support"""
     if not knowledge_base["client"]["name"]:
-        yield history + [[message, "⚠️ Please complete Setup first."]], ""
+        yield history + [[message, "⚠️ Please complete Setup first. Enter your company name in the Setup tab."]], ""
         return
 
     if not message.strip():
@@ -1294,126 +1294,172 @@ async def chat_with_ai_async(message: str, history: list, hf_token: str):
         return
 
     client_name = knowledge_base["client"]["name"]
-    msg_lower = message.lower()
+    client_info = knowledge_base["client"].get("raw_research", "")
 
-    # Check if this needs AI agent (complex task) or can be answered locally
-    needs_agent = any(kw in msg_lower for kw in [
-        "search", "find", "research", "look up", "discover",
-        "draft", "write", "compose", "create email", "send",
-        "save", "add", "store",
-        "analyze", "suggest", "recommend", "strategy",
-        "new prospect", "find companies", "find contacts"
-    ])
-
-    if needs_agent:
-        # Use AI Agent with MCP tools
-        try:
-            agent = AutonomousMCPAgentHF(
-                mcp_registry=mcp_registry,
-                hf_token=token,
-                provider=HF_PROVIDER,
-                model=HF_MODEL
-            )
-
-            # Build context-aware prompt
-            prospects_info = "\n".join([f"- {p['name']} ({p.get('industry', 'Unknown')})" for p in knowledge_base["prospects"][:5]]) if knowledge_base["prospects"] else "None yet"
-            contacts_info = "\n".join([f"- {c['name']} at {c.get('company', 'Unknown')}" for c in knowledge_base["contacts"][:5]]) if knowledge_base["contacts"] else "None yet"
-
-            task = f"""You are a sales assistant for {client_name}.
-
-Current Knowledge Base:
-- Prospects: {len(knowledge_base['prospects'])}
-{prospects_info}
-- Contacts: {len(knowledge_base['contacts'])}
-{contacts_info}
-- Emails drafted: {len(knowledge_base['emails'])}
-
-User request: {message}
-
-Use the available tools to help. Available actions:
-- search_web: Search for company information
-- save_prospect: Save a new prospect
-- save_contact: Save a contact/decision maker
-- send_email: Draft an outreach email
-- save_fact: Save important facts about a company
-
-Complete the user's request and provide a helpful response."""
-
-            response_text = ""
-            current_history = history + [[message, "🤖 Working on it..."]]
-            yield current_history, ""
-
-            async for event in agent.run(task, max_iterations=10):
-                event_type = event.get("type")
-
-                if event_type == "tool_call":
-                    tool = event.get("tool", "")
-                    response_text += f"🔧 Using **{tool}**...\n"
-                    current_history = history + [[message, response_text]]
-                    yield current_history, ""
-
-                elif event_type == "tool_result":
-                    tool = event.get("tool", "")
-                    result = event.get("result", {})
-
-                    # Capture data from tool results (with deduplication)
-                    if tool == "save_prospect" and isinstance(result, dict):
-                        prospect_data = {
-                            "name": result.get("prospect_id", "Unknown"),
-                            "domain": result.get("domain", ""),
-                            "fit_score": 0,
-                            "research_complete": True,
-                            "discovered_at": datetime.now().strftime("%Y-%m-%d %H:%M")
-                        }
-                        # Deduplicate before adding
-                        merge_to_knowledge_base([prospect_data], [], [])
-
-                    elif tool == "save_contact" and isinstance(result, dict):
-                        contact_data = result
-                        merge_to_knowledge_base([], [contact_data], [])
-
-                    elif tool == "send_email" and isinstance(result, dict):
-                        merge_to_knowledge_base([], [], [result])
-
-                    response_text += f"✅ {tool} completed\n"
-                    current_history = history + [[message, response_text]]
-                    yield current_history, ""
-
-                elif event_type == "thought":
-                    thought = event.get("thought", "")
-                    if thought and not thought.startswith("[Processing"):
-                        response_text += f"\n{thought}\n"
-                        current_history = history + [[message, response_text]]
-                        yield current_history, ""
-
-                elif event_type == "agent_complete":
-                    final = event.get("final_answer", "")
-                    if final:
-                        response_text += f"\n---\n{final}"
-                    current_history = history + [[message, response_text]]
-                    yield current_history, ""
-                    return
-
-                elif event_type == "agent_error":
-                    response_text += f"\n⚠️ Error: {event.get('error', 'Unknown error')}"
-                    current_history = history + [[message, response_text]]
-                    yield current_history, ""
-                    return
-
-            # If we get here without returning
-            if not response_text:
-                response_text = "I processed your request but couldn't generate a response."
-            yield history + [[message, response_text]], ""
-            return
-
-        except Exception as e:
-            logger.error(f"Chat agent error: {e}")
-            yield history + [[message, f"⚠️ Error: {str(e)}"]], ""
-            return
+    # Always use LLM for all queries - this is a full AI assistant
+    try:
+        agent = AutonomousMCPAgentHF(
+            mcp_registry=mcp_registry,
+            hf_token=token,
+            provider=HF_PROVIDER,
+            model=HF_MODEL
+        )
+
+        # Build comprehensive context with all knowledge base data
+        prospects_detail = ""
+        if knowledge_base["prospects"]:
+            for i, p in enumerate(knowledge_base["prospects"][:10], 1):
+                p_name = p.get('name', 'Unknown')
+                p_name_lower = p_name.lower()
+                # Get contacts for this prospect
+                p_contacts = [c for c in knowledge_base["contacts"]
+                              if p_name_lower in c.get("company", "").lower()
+                              or c.get("company", "").lower() in p_name_lower]
+                contacts_str = ", ".join([f"{c.get('name')} ({c.get('email')})" for c in p_contacts]) if p_contacts else "No contacts"
+                prospects_detail += f"{i}. {p_name} - {p.get('industry', 'Unknown industry')}, Fit: {p.get('fit_score', 'N/A')}\n"
+                prospects_detail += f" Summary: {p.get('summary', 'No summary')[:100]}\n"
+                prospects_detail += f" Contacts: {contacts_str}\n"
+        else:
+            prospects_detail = "No prospects discovered yet."
+
+        emails_detail = ""
+        if knowledge_base["emails"]:
+            for e in knowledge_base["emails"][:5]:
+                emails_detail += f"- To: {e.get('to')} | Subject: {e.get('subject', 'No subject')[:50]}\n"
+        else:
+            emails_detail = "No emails drafted yet."
+
+        task = f"""You are an AI sales assistant for {client_name}. You are a helpful, knowledgeable assistant that can answer any question about the sales pipeline, prospects, contacts, and help with various sales tasks.
+
+ABOUT {client_name}:
+{client_info[:500] if client_info else "No company research available yet."}
+
+CURRENT SALES PIPELINE:
+======================
+PROSPECTS ({len(knowledge_base['prospects'])}):
+{prospects_detail}
+
+CONTACTS ({len(knowledge_base['contacts'])}):
+{len(knowledge_base['contacts'])} decision makers found across prospects.
+
+DRAFTED EMAILS ({len(knowledge_base['emails'])}):
+{emails_detail}
+
+USER MESSAGE: {message}
+
+INSTRUCTIONS:
+- Answer the user's question helpfully and completely
+- If they ask about prospects, contacts, or emails, use the data above
+- If they ask you to search for something, use search_web tool
+- If they ask you to draft an email, create a professional, personalized email
+- If they ask for talking points, strategies, or recommendations, provide thoughtful, specific advice
+- If they ask to find similar companies or new prospects, use search_web to research
+- Be conversational and helpful - you're a knowledgeable sales assistant
+- Don't say "I don't have that capability" - try to help with whatever they ask
+- For follow-up questions, use context from the conversation
+
+Respond naturally and helpfully to the user's message."""
+
+        response_text = ""
+        current_history = history + [[message, "🤖 Thinking..."]]
+        yield current_history, ""
+
+        async for event in agent.run(task, max_iterations=12):
+            event_type = event.get("type")
+
+            if event_type == "tool_call":
+                tool = event.get("tool", "")
+                tool_input = event.get("input", {})
+                if tool == "search_web":
+                    query = tool_input.get("query", "") if isinstance(tool_input, dict) else ""
+                    response_text += f"🔍 Searching: {query[:50]}...\n"
+                elif tool == "send_email":
+                    response_text += f"✉️ Drafting email...\n"
+                else:
+                    response_text += f"🔧 Using {tool}...\n"
+                current_history = history + [[message, response_text]]
+                yield current_history, ""
+
+            elif event_type == "tool_result":
+                tool = event.get("tool", "")
+                result = event.get("result", {})
+
+                # Capture data from tool results (with deduplication)
+                if tool == "save_prospect" and isinstance(result, dict):
+                    prospect_data = {
+                        "name": result.get("company_name", result.get("prospect_id", "Unknown")),
+                        "domain": result.get("company_domain", result.get("domain", "")),
+                        "fit_score": result.get("fit_score", 75),
+                        "research_complete": True,
+                        "discovered_at": datetime.now().strftime("%Y-%m-%d %H:%M")
+                    }
+                    merge_to_knowledge_base([prospect_data], [], [])
+                    response_text += f"✅ Saved prospect: {prospect_data['name']}\n"
+
+                elif tool == "save_contact" and isinstance(result, dict):
+                    merge_to_knowledge_base([], [result], [])
+                    response_text += f"✅ Saved contact\n"
+
+                elif tool == "send_email" and isinstance(result, dict):
+                    merge_to_knowledge_base([], [], [result])
+                    response_text += f"✅ Email drafted\n"
+
+                elif tool == "search_web":
+                    count = result.get("count", 0) if isinstance(result, dict) else 0
+                    response_text += f"✅ Found {count} results\n"
+
+                current_history = history + [[message, response_text]]
+                yield current_history, ""
+
+            elif event_type == "thought":
+                thought = event.get("thought", "")
+                # Only show substantive thoughts, not processing messages
+                if thought and len(thought) > 50 and not thought.startswith("[Processing"):
+                    # This is likely the AI's actual response
+                    pass # We'll get this in agent_complete
+
+            elif event_type == "agent_complete":
+                final = event.get("final_answer", "")
+                if final and "CX AI Agent" not in final and "Powered by AI" not in final:
+                    # Clean response - show just the final answer
+                    if response_text:
+                        response_text += "\n---\n\n"
+                    response_text += final
+                elif not response_text:
+                    response_text = "I've processed your request. Is there anything else you'd like to know?"
+                current_history = history + [[message, response_text]]
+                yield current_history, ""
+                return
+
+            elif event_type == "agent_error":
+                error = event.get("error", "Unknown error")
+                if "rate limit" in str(error).lower():
+                    response_text += "\n⚠️ Rate limit reached. Please wait a moment and try again."
+                else:
+                    response_text += f"\n⚠️ Error: {error}"
+                current_history = history + [[message, response_text]]
+                yield current_history, ""
+                return
+
+            elif event_type == "agent_max_iterations":
+                if not response_text:
+                    response_text = "I'm still processing your request. The task may be complex - please try a simpler question or try again."
+                current_history = history + [[message, response_text]]
+                yield current_history, ""
+                return
+
+        # If we get here without returning
+        if not response_text:
+            response_text = "I processed your request. Let me know if you need anything else!"
+        yield history + [[message, response_text]], ""
 
-    # For simple queries, use local knowledge base lookup
-    response = get_local_response(message, client_name)
-    yield history + [[message, response]], ""
+    except Exception as e:
+        logger.error(f"Chat agent error: {e}")
+        error_msg = str(e)
+        if "rate limit" in error_msg.lower() or "429" in error_msg:
+            yield history + [[message, "⚠️ Rate limit reached. Please wait a moment and try again."]], ""
+        else:
+            yield history + [[message, f"⚠️ Error: {error_msg}"]], ""
 
 
 def chat_with_ai(message: str, history: list) -> tuple:
@@ -2192,54 +2238,67 @@ def create_app():
             with gr.Column(scale=1):
                 gr.HTML("""
                 <div class="action-card">
-                    <h3>💡 Example Prompts</h3>
-                    <p style="font-size: 13px; color: var(--text-secondary);">Try these commands:</p>
+                    <h3>💡 Try These Prompts</h3>
+                    <p style="font-size: 12px; color: var(--text-secondary);">Your AI assistant can help with anything sales-related</p>
+                </div>
+                """)
+
+                gr.HTML("""
+                <div style="background: var(--bg-tertiary); padding: 12px; border-radius: 8px; margin-bottom: 8px;">
+                    <strong>🔍 Research & Discovery</strong>
+                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.8;">
+                        <li>"Search for DTC fashion brands that recently raised Series A funding"</li>
+                        <li>"Find 5 subscription box companies in the wellness space"</li>
+                        <li>"Research what Allbirds is doing in sustainability this year"</li>
+                        <li>"What are the latest trends in e-commerce personalization?"</li>
+                    </ul>
                 </div>
                 """)
 
                 gr.HTML("""
                 <div style="background: var(--bg-tertiary); padding: 12px; border-radius: 8px; margin-bottom: 8px;">
-                    <strong>📋 Pipeline Overview</strong>
-                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.6;">
-                        <li>"List all prospects"</li>
-                        <li>"Show me the contacts"</li>
-                        <li>"How many emails have been drafted?"</li>
-                        <li>"Give me a pipeline summary"</li>
+                    <strong>✉️ Email Drafting</strong>
+                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.8;">
+                        <li>"Draft a cold outreach email to the CEO of Warby Parker about our analytics platform"</li>
+                        <li>"Write a follow-up email for my meeting request - it's been 5 days with no response"</li>
+                        <li>"Create a meeting invite email for Thursday 3pm, 30 mins, to discuss Q1 partnership"</li>
+                        <li>"Compose a breakup email for prospects who haven't responded after 3 touches"</li>
                     </ul>
                 </div>
                 """)
 
                 gr.HTML("""
                 <div style="background: var(--bg-tertiary); padding: 12px; border-radius: 8px; margin-bottom: 8px;">
-                    <strong>🔍 Research & Analysis</strong>
-                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.6;">
-                        <li>"Research Acme Corp"</li>
-                        <li>"What do we know about Nike?"</li>
-                        <li>"Analyze Buffalo Jackson's fit score"</li>
-                        <li>"Find similar companies to Warby Parker"</li>
+                    <strong>📞 Call Preparation</strong>
+                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.8;">
+                        <li>"I have a call with Glossier tomorrow - give me 10 talking points"</li>
+                        <li>"What objections might Patagonia raise and how should I handle them?"</li>
+                        <li>"Prepare a 5-minute pitch deck outline for my Everlane demo"</li>
+                        <li>"What questions should I ask to qualify Bombas as a prospect?"</li>
                     </ul>
                 </div>
                 """)
 
                 gr.HTML("""
                 <div style="background: var(--bg-tertiary); padding: 12px; border-radius: 8px; margin-bottom: 8px;">
-                    <strong>✉️ Custom Email Drafts</strong>
-                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.6;">
-                        <li>"Draft a meeting request to Allbirds for Tuesday 2pm, 30 mins"</li>
-                        <li>"Write a follow-up email to Glossier about our Q4 partnership proposal"</li>
-                        <li>"Compose an intro email to Patagonia mentioning their sustainability initiatives"</li>
+                    <strong>📊 Pipeline & Analysis</strong>
+                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.8;">
+                        <li>"Give me a summary of all prospects and their status"</li>
+                        <li>"Which prospect has the highest fit score and why?"</li>
+                        <li>"Compare the two prospects and tell me which to prioritize"</li>
+                        <li>"What's the total addressable market for our solution?"</li>
                     </ul>
                 </div>
                 """)
 
                 gr.HTML("""
                 <div style="background: var(--bg-tertiary); padding: 12px; border-radius: 8px;">
-                    <strong>💡 Sales Preparation</strong>
-                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.6;">
-                        <li>"Give me talking points for the Everlane call"</li>
-                        <li>"Who is the decision maker at Bombas?"</li>
-                        <li>"What's Casper's industry and fit reason?"</li>
-                        <li>"Show the existing email draft for Away"</li>
+                    <strong>💡 Strategy & Insights</strong>
+                    <ul style="margin: 8px 0 0 0; padding-left: 16px; font-size: 12px; line-height: 1.8;">
+                        <li>"What's the best approach to sell to a company that just raised funding?"</li>
+                        <li>"How should I position our product against competitor X?"</li>
+                        <li>"Suggest a multi-touch outreach sequence for enterprise prospects"</li>
+                        <li>"What case studies should I reference when talking to retail brands?"</li>
                     </ul>
                 </div>
                 """)
@@ -2296,8 +2355,16 @@ def create_app():
         generate_packet_btn.click(fn=generate_handoff_packet, inputs=[prospect_dropdown], outputs=[handoff_output])
         refresh_dropdown_btn.click(fn=lambda: gr.Dropdown(choices=get_prospect_choices()), outputs=[prospect_dropdown])
 
-        send_btn.click(fn=chat_with_ai, inputs=[chat_input, chatbot], outputs=[chatbot, chat_input])
-        chat_input.submit(fn=chat_with_ai, inputs=[chat_input, chatbot], outputs=[chatbot, chat_input])
+        # Async chat wrapper that uses session token
+        async def chat_async_wrapper(message, history):
+            token = session_hf_token.get("token", "")
+            final_result = (history, "")
+            async for result in chat_with_ai_async(message, history, token):
+                final_result = result
+            return final_result
+
+        send_btn.click(fn=chat_async_wrapper, inputs=[chat_input, chatbot], outputs=[chatbot, chat_input])
+        chat_input.submit(fn=chat_async_wrapper, inputs=[chat_input, chatbot], outputs=[chatbot, chat_input])
 
     return demo
 