import gradio as gr
import spaces
import torch
import os
from typing import List, Dict, Any

from models import CodeModel
from utils import format_code_response, parse_model_output

# Initialize the code model
code_model = CodeModel()

# Greeting shown when the chat is opened or cleared
WELCOME_MESSAGE = (
    "Hello! I'm your AI coding assistant powered by a 5B parameter language model. "
    "I can help you with Python, JavaScript, Java, C++, and many other programming "
    "languages. What would you like to code today?"
)


def chat_with_coder(
    message: str,
    history: List[Dict[str, str]],
    language: str = "python",
    temperature: float = 0.7,
) -> Dict[str, Any]:
    """Main chatbot function that handles coding queries with a 5B parameter model."""
    try:
        # Add context about coding capabilities
        system_prompt = f"""You are an expert {language} programmer and AI coding assistant.

You help users with:
- Writing and debugging {language} code
- Code optimization and best practices
- Explaining complex programming concepts
- Code review and suggestions
- Algorithm implementation

Always provide clean, well-commented, and efficient code.
Format code blocks properly with language specification."""

        # Prepare messages for the model
        messages = [{"role": "system", "content": system_prompt}]
        messages.extend(history)
        messages.append({"role": "user", "content": message})

        # Generate response using the model
        response = code_model.generate(
            messages=messages,
            temperature=temperature,
            max_new_tokens=2048,
            language=language,
        )

        # Parse and format the response
        formatted_response = format_code_response(response)

        # Update chat history
        new_history = history.copy()
        new_history.append({"role": "user", "content": message})
        new_history.append({"role": "assistant", "content": formatted_response})

        return {
            "choices": [{"message": {"content": formatted_response}}],
            "history": new_history,
        }

    except Exception as e:
        error_msg = (
            f"I apologize, but I encountered an error: {str(e)}. "
            "Please try again or rephrase your question."
        )
        return {"choices": [{"message": {"content": error_msg}}], "history": history}


def clear_chat():
    """Reset the chat to the initial greeting."""
    return [{"role": "assistant", "content": WELCOME_MESSAGE}]


def create_demo():
    """Create the Gradio demo interface."""
    with gr.Blocks(
        title="AI Coder - 5B Parameter Chatbot",
        theme=gr.themes.Soft(),
        css="""
        .container {max-width: 1200px !important;}
        .header {text-align: center; padding: 20px;}
        .header h1 {color: #2d3748; margin-bottom: 10px;}
        .header a {color: #3182ce; text-decoration: none; font-weight: bold;}
        .header a:hover {text-decoration: underline;}
        .coding-section {background: #f7fafc; border-radius: 8px; padding: 15px; margin: 10px 0;}
        """,
    ) as demo:
        # Header
        gr.HTML("""
            <div class="header">
                <h1>🤖 AI Coder - Powered by 5B Parameter Model</h1>
                <p>Advanced AI chatbot with comprehensive coding features using a 5B parameter language model</p>
                <p>Built with anycoder</p>
            </div>
        """)

        # Main chat interface
        with gr.Row():
            # Left column - Chat
            with gr.Column(scale=3):
                chatbot = gr.Chatbot(
                    label="AI Coding Assistant",
                    height=600,
                    type="messages",
                    avatar_images=(None, "🤖"),
                    show_copy_button=True,
                    # Load the initial greeting directly into the chat
                    value=[{"role": "assistant", "content": WELCOME_MESSAGE}],
                )

                with gr.Row():
                    msg = gr.Textbox(
                        placeholder="Ask me to code something, debug code, or explain programming concepts...",
                        lines=3,
                        scale=4,
                    )
                    send_btn = gr.Button("Send", variant="primary", scale=1)

                with gr.Row():
                    clear_btn = gr.Button("Clear Chat", variant="secondary")

            # Right column - Controls
            with gr.Column(scale=1):
                gr.Markdown("### 🛠️ Coding Settings")

                language = gr.Dropdown(
                    choices=[
                        "python", "javascript", "java", "cpp", "c", "go", "rust",
                        "typescript", "php", "ruby", "swift", "kotlin", "sql",
                        "html", "css", "bash", "powershell",
                    ],
                    value="python",
                    label="Programming Language",
                    info="Target language for code generation",
                )

                temperature = gr.Slider(
                    minimum=0.1,
                    maximum=1.0,
                    value=0.7,
                    step=0.1,
                    label="Creativity (Temperature)",
                    info="Lower for precise code, higher for creative solutions",
                )

                with gr.Accordion("🎯 Quick Coding Prompts", open=False):
                    gr.Examples(
                        examples=[
                            "Write a Python function to reverse a linked list",
                            "Create a React component for a login form",
                            "Debug this JavaScript code: [paste code]",
                            "Explain Big O notation with code examples",
                            "Write SQL queries for a user management system",
                            "Create a binary search algorithm in C++",
                        ],
                        inputs=msg,
                        examples_per_page=3,
                    )

                with gr.Accordion("🔧 Model Info", open=False):
                    gr.Markdown(f"""
                    **Model:** {code_model.model_name}
                    **Parameters:** {code_model.parameter_count}
                    **Max Context:** {code_model.max_length:,} tokens
                    **Device:** {'CUDA' if torch.cuda.is_available() else 'CPU'}
                    **Status:** {'✅ Ready' if code_model.is_loaded else '⏳ Loading...'}
                    """)

        # Event handlers
        def user(user_message, history):
            return "", history + [{"role": "user", "content": user_message}]

        def bot(history, selected_language, temp):
            if not history:
                return history
            last_message = history[-1]["content"]
            result = chat_with_coder(last_message, history[:-1], selected_language, temp)
            return result["history"]

        # Wire up events
        msg.submit(
            user, [msg, chatbot], [msg, chatbot], queue=False
        ).then(
            bot, [chatbot, language, temperature], chatbot
        )

        send_btn.click(
            user, [msg, chatbot], [msg, chatbot], queue=False
        ).then(
            bot, [chatbot, language, temperature], chatbot
        )

        clear_btn.click(clear_chat, outputs=[chatbot])

    return demo


if __name__ == "__main__":
    demo = create_demo()
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        show_error=True,
        share=False,
        debug=True,
    )

I've fixed the syntax error in the app.py file by removing the problematic line that was causing the issue. The file should now run properly without syntax errors.