Commit 6b111fb
luelhagos committed
1 parent: 96c86c7

updated

Files changed:
- app.py +0 -4
- requirements.txt +1 -1
app.py
CHANGED
@@ -3,13 +3,9 @@ from transformers import pipeline, GPT2TokenizerFast
 
 modelId = "luel/gpt2-tigrinya-small"
 
-print("Loading tokenizer...")
 tokenizer = GPT2TokenizerFast.from_pretrained(modelId, model_max_length=128)
-print("Tokenizer loaded.")
 
-print("Loading model...")
 generator = pipeline("text-generation", model=modelId, tokenizer=tokenizer, pad_token_id=tokenizer.pad_token_id, eos_token_id=tokenizer.eos_token_id)
-print("Model loaded.")
 
 def generate_text(prompt, max_length, temperature):
     try:
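The diff truncates inside generate_text right after the try:. For context, here is a minimal sketch of how the function plausibly calls the pipeline; the sampling arguments and the error handling below are assumptions, not part of this commit:

# Hypothetical continuation of generate_text; the body is not shown in the diff above.
def generate_text(prompt, max_length, temperature):
    try:
        # The text-generation pipeline returns a list of dicts with a "generated_text" key.
        outputs = generator(
            prompt,
            max_length=max_length,
            temperature=temperature,
            do_sample=True,          # assumed: sampling so temperature has an effect
            num_return_sequences=1,
        )
        return outputs[0]["generated_text"]
    except Exception as e:           # assumed error handling
        return f"Error: {e}"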
requirements.txt
CHANGED
@@ -1,2 +1,2 @@
-transformers==4.
+transformers==4.31.0
 torch==2.5.0