shamik
feat: downgrade the Python version from 3.12.10 to 3.11.6 to fix llama-hub installation problems
b4fb438
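A minimal sketch of how such a downgrade is typically expressed, assuming the project pins its interpreter via requires-python in pyproject.toml (the pyproject.toml itself is not part of this commit, so the exact constraint below is illustrative):

    # pyproject.toml (assumed, not shown in this commit)
    [project]
    requires-python = ">=3.11,<3.12"

    # Re-resolve the lock file against the pinned interpreter;
    # uv pip compile accepts --python-version to target a specific Python.
    uv pip compile pyproject.toml -o requirements.txt --python-version 3.11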
# This file was autogenerated by uv via the following command:
#    uv pip compile pyproject.toml -o requirements.txt
accelerate==1.7.0
    # via
    #   flagembedding
    #   peft
    #   transformers
aiofiles==24.1.0
    # via gradio
aiohappyeyeballs==2.6.1
    # via aiohttp
aiohttp==3.12.11
    # via
    #   fsspec
    #   huggingface-hub
    #   llama-index-core
aiosignal==1.3.2
    # via aiohttp
aiosqlite==0.21.0
    # via llama-index-core
annotated-types==0.7.0
    # via pydantic
anyio==4.9.0
    # via
    #   gradio
    #   httpx
    #   openai
    #   starlette
arxiv==2.2.0
    # via agent-hackathon (pyproject.toml)
attrs==25.3.0
    # via aiohttp
banks==2.1.2
    # via llama-index-core
beautifulsoup4==4.13.4
    # via
    #   ir-datasets
    #   llama-index-readers-file
cbor==1.0.0
    # via trec-car-tools
certifi==2025.4.26
    # via
    #   httpcore
    #   httpx
    #   llama-cloud
    #   requests
charset-normalizer==3.4.2
    # via requests
click==8.2.1
    # via
    #   duckduckgo-search
    #   llama-cloud-services
    #   nltk
    #   typer
    #   uvicorn
colorama==0.4.6
    # via griffe
dataclasses-json==0.6.7
    # via llama-index-core
datasets==3.6.0
    # via flagembedding
deprecated==1.2.18
    # via
    #   banks
    #   llama-index-core
dill==0.3.8
    # via
    #   datasets
    #   multiprocess
dirtyjson==1.0.8
    # via llama-index-core
distro==1.9.0
    # via openai
duckduckgo-search==6.4.2
    # via llama-index-tools-duckduckgo
fastapi==0.115.12
    # via gradio
feedparser==6.0.11
    # via arxiv
ffmpy==0.6.0
    # via gradio
filelock==3.18.0
    # via
    #   datasets
    #   huggingface-hub
    #   torch
    #   transformers
filetype==1.2.0
    # via llama-index-core
flagembedding==1.3.5
    # via agent-hackathon (pyproject.toml)
frozenlist==1.6.2
    # via
    #   aiohttp
    #   aiosignal
fsspec==2025.3.0
    # via
    #   datasets
    #   gradio-client
    #   huggingface-hub
    #   llama-index-core
    #   torch
gradio==5.33.1
    # via agent-hackathon (pyproject.toml)
gradio-client==1.10.3
    # via gradio
greenlet==3.2.3
    # via sqlalchemy
griffe==1.7.3
    # via banks
groovy==0.1.2
    # via gradio
grpcio==1.67.1
    # via pymilvus
h11==0.16.0
    # via
    #   httpcore
    #   uvicorn
hf-xet==1.1.3
    # via huggingface-hub
html2text==2025.4.15
    # via llama-hub
httpcore==1.0.9
    # via httpx
httpx==0.28.1
    # via
    #   agent-hackathon (pyproject.toml)
    #   gradio
    #   gradio-client
    #   llama-cloud
    #   llama-index-core
    #   openai
    #   safehttpx
huggingface-hub==0.32.4
    # via
    #   agent-hackathon (pyproject.toml)
    #   accelerate
    #   datasets
    #   gradio
    #   gradio-client
    #   llama-index-embeddings-huggingface
    #   llama-index-embeddings-huggingface-api
    #   llama-index-llms-huggingface-api
    #   llama-index-utils-huggingface
    #   peft
    #   sentence-transformers
    #   smolagents
    #   tokenizers
    #   transformers
idna==3.10
    # via
    #   anyio
    #   httpx
    #   requests
    #   yarl
ijson==3.4.0
    # via ir-datasets
inquirerpy==0.3.4
    # via huggingface-hub
inscriptis==2.6.0
    # via ir-datasets
ir-datasets==0.5.10
    # via flagembedding
jinja2==3.1.6
    # via
    #   banks
    #   gradio
    #   smolagents
    #   torch
jiter==0.10.0
    # via openai
joblib==1.5.1
    # via
    #   nltk
    #   scikit-learn
llama-cloud==0.1.23
    # via
    #   llama-cloud-services
    #   llama-index-indices-managed-llama-cloud
llama-cloud-services==0.6.30
    # via llama-parse
llama-hub==0.0.79.post1
    # via agent-hackathon (pyproject.toml)
llama-index==0.12.41
    # via llama-hub
llama-index-agent-openai==0.4.9
    # via
    #   llama-index
    #   llama-index-program-openai
llama-index-cli==0.4.3
    # via llama-index
llama-index-core==0.12.41
    # via
    #   llama-cloud-services
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-embeddings-huggingface
    #   llama-index-embeddings-huggingface-api
    #   llama-index-embeddings-openai
    #   llama-index-indices-managed-llama-cloud
    #   llama-index-llms-huggingface
    #   llama-index-llms-huggingface-api
    #   llama-index-llms-nebius
    #   llama-index-llms-openai
    #   llama-index-llms-openai-like
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
    #   llama-index-readers-file
    #   llama-index-readers-llama-parse
    #   llama-index-tools-duckduckgo
    #   llama-index-utils-huggingface
    #   llama-index-vector-stores-milvus
llama-index-embeddings-huggingface==0.5.4
    # via agent-hackathon (pyproject.toml)
llama-index-embeddings-huggingface-api==0.3.1
    # via agent-hackathon (pyproject.toml)
llama-index-embeddings-openai==0.3.1
    # via
    #   llama-index
    #   llama-index-cli
llama-index-indices-managed-llama-cloud==0.7.4
    # via llama-index
llama-index-llms-huggingface==0.5.0
    # via agent-hackathon (pyproject.toml)
llama-index-llms-huggingface-api==0.5.0
    # via agent-hackathon (pyproject.toml)
llama-index-llms-nebius==0.1.2
    # via agent-hackathon (pyproject.toml)
llama-index-llms-openai==0.4.4
    # via
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-llms-openai-like
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
llama-index-llms-openai-like==0.4.0
    # via llama-index-llms-nebius
llama-index-multi-modal-llms-openai==0.5.1
    # via llama-index
llama-index-program-openai==0.3.2
    # via
    #   llama-index
    #   llama-index-question-gen-openai
llama-index-question-gen-openai==0.3.1
    # via llama-index
llama-index-readers-file==0.4.8
    # via llama-index
llama-index-readers-llama-parse==0.4.0
    # via llama-index
llama-index-tools-duckduckgo==0.3.0
    # via agent-hackathon (pyproject.toml)
llama-index-utils-huggingface==0.3.0
    # via llama-index-embeddings-huggingface-api
llama-index-vector-stores-milvus==0.8.4
    # via agent-hackathon (pyproject.toml)
llama-parse==0.6.30
    # via llama-index-readers-llama-parse
lxml==5.4.0
    # via
    #   inscriptis
    #   ir-datasets
lz4==4.4.4
    # via ir-datasets
markdown-it-py==3.0.0
    # via rich
markupsafe==3.0.2
    # via
    #   gradio
    #   jinja2
marshmallow==3.26.1
    # via dataclasses-json
mdurl==0.1.2
    # via markdown-it-py
milvus-lite==2.4.12
    # via pymilvus
mpmath==1.3.0
    # via sympy
multidict==6.4.4
    # via
    #   aiohttp
    #   yarl
multiprocess==0.70.16
    # via datasets
mypy-extensions==1.1.0
    # via typing-inspect
nest-asyncio==1.6.0
    # via llama-index-core
networkx==3.4.2
    # via
    #   llama-index-core
    #   torch
nltk==3.9.1
    # via
    #   llama-index
    #   llama-index-core
numpy==2.2.6
    # via
    #   accelerate
    #   datasets
    #   gradio
    #   ir-datasets
    #   llama-index-core
    #   pandas
    #   peft
    #   scikit-learn
    #   scipy
    #   transformers
    #   trec-car-tools
openai==1.84.0
    # via
    #   agent-hackathon (pyproject.toml)
    #   llama-index-agent-openai
    #   llama-index-embeddings-openai
    #   llama-index-llms-openai
orjson==3.10.18
    # via gradio
packaging==25.0
    # via
    #   accelerate
    #   datasets
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   marshmallow
    #   peft
    #   transformers
pandas==2.3.0
    # via
    #   datasets
    #   gradio
    #   llama-index-readers-file
    #   pymilvus
peft==0.15.2
    # via flagembedding
pfzy==0.3.4
    # via inquirerpy
pillow==11.2.1
    # via
    #   gradio
    #   llama-index-core
    #   sentence-transformers
    #   smolagents
pip==25.1.1
    # via agent-hackathon (pyproject.toml)
platformdirs==4.3.8
    # via
    #   banks
    #   llama-cloud-services
primp==0.15.0
    # via duckduckgo-search
prompt-toolkit==3.0.51
    # via inquirerpy
propcache==0.3.1
    # via
    #   aiohttp
    #   yarl
protobuf==6.31.1
    # via
    #   flagembedding
    #   pymilvus
psutil==7.0.0
    # via
    #   accelerate
    #   llama-hub
    #   peft
pyaml==23.12.0
    # via llama-hub
pyarrow==20.0.0
    # via
    #   datasets
    #   ir-datasets
pydantic==2.11.5
    # via
    #   banks
    #   fastapi
    #   gradio
    #   llama-cloud
    #   llama-cloud-services
    #   llama-index-core
    #   openai
pydantic-core==2.33.2
    # via pydantic
pydub==0.25.1
    # via gradio
pygments==2.19.1
    # via rich
pymilvus==2.5.10
    # via llama-index-vector-stores-milvus
pypdf==5.6.0
    # via llama-index-readers-file
pyprojroot==0.3.0
    # via agent-hackathon (pyproject.toml)
python-dateutil==2.9.0.post0
    # via pandas
python-dotenv==1.1.0
    # via
    #   agent-hackathon (pyproject.toml)
    #   llama-cloud-services
    #   pymilvus
    #   smolagents
python-multipart==0.0.20
    # via gradio
pytz==2025.2
    # via pandas
pyyaml==6.0.2
    # via
    #   accelerate
    #   datasets
    #   gradio
    #   huggingface-hub
    #   ir-datasets
    #   llama-index-core
    #   peft
    #   pyaml
    #   transformers
regex==2024.11.6
    # via
    #   nltk
    #   tiktoken
    #   transformers
requests==2.32.3
    # via
    #   arxiv
    #   datasets
    #   huggingface-hub
    #   inscriptis
    #   ir-datasets
    #   llama-index-core
    #   smolagents
    #   tiktoken
    #   transformers
retrying==1.3.4
    # via llama-hub
rich==14.0.0
    # via
    #   smolagents
    #   typer
ruff==0.11.13
    # via gradio
safehttpx==0.1.6
    # via gradio
safetensors==0.5.3
    # via
    #   accelerate
    #   peft
    #   transformers
scikit-learn==1.7.0
    # via sentence-transformers
scipy==1.15.3
    # via
    #   scikit-learn
    #   sentence-transformers
semantic-version==2.10.0
    # via gradio
sentence-transformers==4.1.0
    # via
    #   flagembedding
    #   llama-index-embeddings-huggingface
sentencepiece==0.2.0
    # via flagembedding
setuptools==80.9.0
    # via pymilvus
sgmllib3k==1.0.0
    # via feedparser
shellingham==1.5.4
    # via typer
six==1.17.0
    # via
    #   python-dateutil
    #   retrying
smolagents==1.17.0
    # via agent-hackathon (pyproject.toml)
sniffio==1.3.1
    # via
    #   anyio
    #   openai
soupsieve==2.7
    # via beautifulsoup4
sqlalchemy==2.0.41
    # via llama-index-core
starlette==0.46.2
    # via
    #   fastapi
    #   gradio
striprtf==0.0.26
    # via llama-index-readers-file
sympy==1.13.1
    # via torch
tenacity==9.1.2
    # via llama-index-core
threadpoolctl==3.6.0
    # via scikit-learn
tiktoken==0.9.0
    # via llama-index-core
tokenizers==0.21.1
    # via transformers
tomlkit==0.13.3
    # via gradio
torch==2.6.0
    # via
    #   accelerate
    #   flagembedding
    #   llama-index-llms-huggingface
    #   peft
    #   sentence-transformers
    #   transformers
tqdm==4.67.1
    # via
    #   datasets
    #   huggingface-hub
    #   ir-datasets
    #   llama-index-core
    #   milvus-lite
    #   nltk
    #   openai
    #   peft
    #   sentence-transformers
    #   transformers
transformers==4.52.4
    # via
    #   flagembedding
    #   llama-index-llms-huggingface
    #   llama-index-llms-openai-like
    #   peft
    #   sentence-transformers
trec-car-tools==2.6
    # via ir-datasets
typer==0.16.0
    # via gradio
typing-extensions==4.14.0
    # via
    #   aiosqlite
    #   anyio
    #   beautifulsoup4
    #   fastapi
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   llama-index-core
    #   openai
    #   pydantic
    #   pydantic-core
    #   pyprojroot
    #   sentence-transformers
    #   sqlalchemy
    #   torch
    #   typer
    #   typing-inspect
    #   typing-inspection
typing-inspect==0.9.0
    # via
    #   dataclasses-json
    #   llama-index-core
typing-inspection==0.4.1
    # via pydantic
tzdata==2025.2
    # via pandas
ujson==5.10.0
    # via pymilvus
unlzw3==0.2.3
    # via ir-datasets
urllib3==2.4.0
    # via requests
uvicorn==0.34.3
    # via gradio
warc3-wet==0.2.5
    # via ir-datasets
warc3-wet-clueweb09==0.2.5
    # via ir-datasets
wcwidth==0.2.13
    # via prompt-toolkit
websockets==15.0.1
    # via gradio-client
wrapt==1.17.2
    # via
    #   deprecated
    #   llama-index-core
xxhash==3.5.0
    # via datasets
yarl==1.20.0
    # via aiohttp
zlib-state==0.1.9
    # via ir-datasets