import logging

from llms import LLM
from utils.remote_client import execute_remote_task
def text_translation(text: str, model: str, src_lang: str, tgt_lang: str, custom_instructions: str = "", use_llm: bool = True) -> str:
    """Translate *text* from ``src_lang`` to ``tgt_lang``.

    Dispatches to the LLM-backed translator when ``use_llm`` is true,
    otherwise to the traditional (Modal API) translator. Blank or
    whitespace-only input short-circuits to ``""`` without contacting
    either backend. ``custom_instructions`` is only honored on the LLM
    path (the traditional backend does not accept it).
    """
    # Guard clause: nothing to translate.
    if not text.strip():
        return ""
    if use_llm:
        return _translation_with_llm(text, model, src_lang, tgt_lang, custom_instructions)
    return _translation_with_traditional(text, model, src_lang, tgt_lang)
def _translation_with_llm(text: str, model: str, src_lang: str, tgt_lang: str, custom_instructions: str = "") -> str:
    """Translate *text* from ``src_lang`` to ``tgt_lang`` using an LLM.

    Builds a translation prompt — optionally including caller-supplied
    ``custom_instructions`` between the task statement and the text —
    queries the model, and returns the stripped completion.

    Returns a generic user-facing error message (never raises) if the
    LLM call fails for any reason; the failure is logged with its
    traceback.
    """
    try:
        llm = LLM(model=model)
        # Assemble the prompt from its parts; joining with "\n" yields
        # exactly the same string as the previous concatenation form.
        parts = [f"Translate the following text from {src_lang} to {tgt_lang}."]
        if custom_instructions:
            parts.append(custom_instructions)
        parts.append(f"Text: {text}\nTranslation:")
        prompt = "\n".join(parts)
        translation = llm.generate(prompt)
        return translation.strip()
    except Exception:
        # logging.exception records the full traceback (print(str(e))
        # discarded it); the caller still receives a friendly message.
        logging.getLogger(__name__).exception("Error in LLM translation")
        return "Oops! Something went wrong. Please try again later."
def _translation_with_traditional(text: str, model: str, src_lang: str, tgt_lang: str) -> str:
    """Translate *text* via the remote (Modal API) translation task.

    Sends the text and language pair to the remote ``"translation"``
    task and returns the ``"translation"`` field of the response
    (``""`` when the field is absent).

    Returns a generic user-facing error message (never raises) when the
    remote call fails or the response carries an ``"error"`` key; real
    failures are logged with their traceback.
    """
    try:
        payload = {
            "text": text,
            "model": model,
            "src_lang": src_lang,
            "tgt_lang": tgt_lang,
        }
        resp = execute_remote_task("translation", payload)
        if "error" in resp:
            # Remote side reported a failure; hide details from the user.
            return "Oops! Something went wrong. Please try again later."
        return resp.get("translation", "")
    except Exception:
        # logging.exception preserves the traceback, unlike the former
        # print(str(e)); behavior toward the caller is unchanged.
        logging.getLogger(__name__).exception("Error in traditional translation")
        return "Oops! Something went wrong. Please try again later."