Update app.py
app.py CHANGED
@@ -9,14 +9,9 @@ client = OpenAI(api_key=OPENAI_API_KEY)
 OPEN_AI_MODEL = "gpt-4-1106-preview"
 
 # thread = gr.State(client.beta.threads.create())
-thread_id = None
-
-
-    global thread_id
-    if thread_id is None:
-        thread = client.beta.threads.create()
-        thread_id = thread.id
-    return thread_id, thread
+# thread_id = None
+
+
 
 def wait_on_run(run, thread):
     while run.status == "queued" or run.status == "in_progress":
@@ -108,17 +103,20 @@ def response_evaluation_for_case_tx(thread_id, query, question_text, input, outp
 
 
 def run_chat_in_all_cases(message, history, question_text,input, output, examples, code_written):
-
-
-
-
+    if "thread_id" in gr.get_session():
+        pass
+    else:
+        thread = client.beta.threads.create()
+        thread_id = thread.id
+        gr.save_to_session(thread_id=thread_id)
+        gr.save_to_session(thread=thread)
     print(thread_id)
     if not message and not code_written:
-        ai_message = opening_statement(thread_id
+        ai_message = opening_statement(thread_id, question_text, input, output, examples, thread)
     if not code_written:
-        ai_message = get_response_for_case_t0(thread_id
+        ai_message = get_response_for_case_t0(thread_id, message, question_text, input, output, examples, thread)
     else:
-        ai_message = response_evaluation_for_case_tx(thread_id
+        ai_message = response_evaluation_for_case_tx(thread_id, message, question_text, input, output, examples, code_written, thread)
     print({"question_text":question_text, "input":input, "output":output, "examples":examples,
            "user_code":code_written, "query":message, "ai_message":ai_message})
     return ai_message
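For reference, Gradio's documented mechanism for keeping one value per browser session is a gr.State component that is passed into the handler and returned from it. The sketch below shows the same "create the Assistants thread once per visitor" idea written that way; the handler body, component layout, and every name other than the gradio/openai calls are assumptions for illustration, not the Space's actual code.

import os
import gradio as gr
from openai import OpenAI

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])

def respond(message, thread_id):
    # gr.State gives each browser session its own thread_id, so the
    # Assistants thread is created once per visitor rather than once
    # for the whole process (as a module-level thread_id global would be).
    if thread_id is None:
        thread = client.beta.threads.create()
        thread_id = thread.id
    client.beta.threads.messages.create(thread_id=thread_id, role="user", content=message)
    # ... create a run and poll it here, as app.py's wait_on_run() does ...
    ai_message = "(assistant reply placeholder)"
    return ai_message, thread_id  # returning thread_id writes it back into the State

with gr.Blocks() as demo:
    thread_id_state = gr.State(None)   # per-session storage, starts empty
    user_box = gr.Textbox(label="Message")
    reply_box = gr.Textbox(label="Assistant")
    user_box.submit(respond, [user_box, thread_id_state], [reply_box, thread_id_state])

if __name__ == "__main__":
    demo.launch()

Returning thread_id from the handler is what persists it between turns for the same visitor, so threads are never shared across users.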
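The diff only shows the first line of wait_on_run(); the Assistants-API polling loop it appears to implement looks roughly like the sketch below. The runs.retrieve call is a real openai client method, but the 0.5 s interval and the exact body are assumptions, not the Space's code.

import time

def wait_on_run(run, thread):
    # Poll the run until it leaves the "queued"/"in_progress" states;
    # `client` is the OpenAI client created at the top of app.py.
    while run.status == "queued" or run.status == "in_progress":
        run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
        time.sleep(0.5)
    return run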