Spaces — status: Sleeping

Commit: Fix Dockerfile build issues and update .env.example with correct API keys
Browse files — Files changed: .env.example (+16 −7), Dockerfile (+21 −4)
.env.example — CHANGED

@@ -1,12 +1,21 @@
-#  [old line truncated in page capture]
-# Get your key from: https://  [old line truncated in page capture]
-  [old line truncated in page capture]
+# DeepSeek API Key (Required)
+# Get your key from: https://platform.deepseek.com/
+DEEPSEEK_API_KEY=your_deepseek_api_key_here
 
-#  [old line truncated in page capture]
+# Inflection AI API Key (Required)
+# Get your key from: https://inflection.ai/
+INFLECTION_AI_API_KEY=your_inflection_ai_api_key_here
+
+# Azure Text Analytics (Optional - for enhanced sentiment analysis)
 # If not provided, the app will use NLTK VADER as fallback
 # Get your credentials from Azure Portal after creating a "Language service" resource
-AZURE_TEXT_ANALYTICS_KEY=
-AZURE_TEXT_ANALYTICS_ENDPOINT=
+AZURE_TEXT_ANALYTICS_KEY=your_azure_text_analytics_key_here
+AZURE_TEXT_ANALYTICS_ENDPOINT=https://your-resource.cognitiveservices.azure.com/
+
+# ANU Quantum API Key (Optional - for quantum randomness)
+# If not provided, the app will use pseudo-random numbers
+# Get your key from: https://quantumnumbers.anu.edu.au/
+ANU_QUANTUM_API_KEY=your_quantum_api_key_here
 
-# Optional
+# Port (Optional - defaults to 7860 for Hugging Face Spaces)
 PORT=7860
Dockerfile — CHANGED

@@ -8,19 +8,36 @@ WORKDIR /app
 RUN apt-get update && apt-get install -y \
     build-essential \
     git \
+    curl \
+    ca-certificates \
     && rm -rf /var/lib/apt/lists/*
 
 # Copy requirements first for better caching
 COPY requirements.txt .
 
-# Install Python dependencies
-RUN pip install --no-cache-dir  [rest of old line truncated in page capture]
+# Install Python dependencies with retry logic
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    pip install --no-cache-dir -r requirements.txt
 
 # Copy the entire application
 COPY . .
 
-# Download NLTK data during build
-RUN python -c "import nltk;  [rest of old line truncated in page capture]
+# Download NLTK data during build with error handling
+RUN python -c "import nltk; import os; \
+    nltk_data_dir = '/root/nltk_data'; \
+    os.makedirs(nltk_data_dir, exist_ok=True); \
+    nltk.data.path.append(nltk_data_dir); \
+    try: \
+    nltk.download('punkt', quiet=True); \
+    print('Downloaded punkt'); \
+    except Exception as e: \
+    print(f'Failed to download punkt: {e}'); \
+    try: \
+    nltk.download('vader_lexicon', quiet=True); \
+    print('Downloaded vader_lexicon'); \
+    except Exception as e: \
+    print(f'Failed to download vader_lexicon: {e}'); \
+    print('NLTK download step completed')"
 
 # Expose the port the app runs on
 EXPOSE 7860

NOTE(review): the new `RUN python -c "…"` step looks broken despite the commit title. The
Dockerfile parser folds the backslash-continued lines into a single line before the shell
runs it, so Python receives the whole program as one logical line — and compound statements
(`try:` / `except:`) cannot follow a `;` on the same line, which makes this step fail with a
SyntaxError at build time. Moving the logic into a small helper script, or feeding it via a
heredoc (`RUN python - <<'EOF' … EOF`) with real newlines, would fix it.