# This file was autogenerated by uv via the following command:
#    uv pip compile notebooks/requirements_cpu.in -o notebooks/requirements_cpu.txt
aiohttp==3.9.5
    # via
    #   datasets
    #   fsspec
    #   llama-index-core
    #   llama-index-legacy
aiosignal==1.3.1
    # via aiohttp
annotated-types==0.7.0
    # via pydantic
anyio==4.4.0
    # via
    #   httpx
    #   openai
attrs==23.2.0
    # via
    #   aiohttp
    #   jsonschema
    #   referencing
beautifulsoup4==4.12.3
    # via llama-index-readers-file
certifi==2024.6.2
    # via
    #   httpcore
    #   httpx
    #   requests
charset-normalizer==3.3.2
    # via requests
click==8.1.7
    # via nltk
cloudpickle==3.0.0
    # via outlines
dataclasses-json==0.6.7
    # via
    #   llama-index-core
    #   llama-index-legacy
datasets==2.20.0
    # via
    #   -r notebooks/requirements_cpu.in
    #   outlines
deprecated==1.2.14
    # via
    #   llama-index-core
    #   llama-index-legacy
dill==0.3.8
    # via
    #   datasets
    #   multiprocess
dirtyjson==1.0.8
    # via
    #   llama-index-core
    #   llama-index-legacy
diskcache==5.6.3
    # via outlines
distro==1.9.0
    # via openai
filelock==3.15.1
    # via
    #   datasets
    #   huggingface-hub
frozenlist==1.4.1
    # via
    #   aiohttp
    #   aiosignal
fsspec==2024.5.0
    # via
    #   datasets
    #   huggingface-hub
    #   llama-index-core
    #   llama-index-legacy
greenlet==3.0.3
    # via sqlalchemy
h11==0.14.0
    # via httpcore
httpcore==1.0.5
    # via httpx
httpx==0.27.0
    # via
    #   llama-index-core
    #   llama-index-legacy
    #   llamaindex-py-client
    #   openai
huggingface-hub==0.23.4
    # via datasets
idna==3.7
    # via
    #   anyio
    #   httpx
    #   requests
    #   yarl
interegular==0.3.3
    # via outlines
jinja2==3.1.4
    # via outlines
joblib==1.4.2
    # via nltk
jsonschema==4.22.0
    # via outlines
jsonschema-specifications==2023.12.1
    # via jsonschema
lark==1.1.9
    # via outlines
llama-index==0.10.46
    # via -r notebooks/requirements_cpu.in
llama-index-agent-openai==0.2.7
    # via
    #   llama-index
    #   llama-index-program-openai
llama-index-cli==0.1.12
    # via llama-index
llama-index-core==0.10.46
    # via
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-embeddings-openai
    #   llama-index-indices-managed-llama-cloud
    #   llama-index-llms-openai
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
    #   llama-index-readers-file
    #   llama-index-readers-llama-parse
    #   llama-parse
llama-index-embeddings-openai==0.1.10
    # via
    #   llama-index
    #   llama-index-cli
llama-index-indices-managed-llama-cloud==0.1.6
    # via llama-index
llama-index-legacy==0.9.48
    # via llama-index
llama-index-llms-openai==0.1.22
    # via
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.1.6
    # via llama-index
llama-index-program-openai==0.1.6
    # via
    #   llama-index
    #   llama-index-question-gen-openai
llama-index-question-gen-openai==0.1.3
    # via llama-index
llama-index-readers-file==0.1.25
    # via llama-index
llama-index-readers-llama-parse==0.1.4
    # via llama-index
llama-parse==0.4.4
    # via llama-index-readers-llama-parse
llamaindex-py-client==0.1.19
    # via
    #   llama-index-core
    #   llama-index-indices-managed-llama-cloud
llvmlite==0.43.0
    # via numba
markdown-it-py==3.0.0
    # via rich
markupsafe==2.1.5
    # via jinja2
marshmallow==3.21.3
    # via dataclasses-json
mdurl==0.1.2
    # via markdown-it-py
multidict==6.0.5
    # via
    #   aiohttp
    #   yarl
multiprocess==0.70.16
    # via datasets
mypy-extensions==1.0.0
    # via typing-inspect
nest-asyncio==1.6.0
    # via
    #   llama-index-core
    #   llama-index-legacy
    #   outlines
networkx==3.3
    # via
    #   llama-index-core
    #   llama-index-legacy
nltk==3.8.1
    # via
    #   llama-index-core
    #   llama-index-legacy
numba==0.60.0
    # via outlines
numpy==1.26.4
    # via
    #   datasets
    #   llama-index-core
    #   llama-index-legacy
    #   numba
    #   outlines
    #   pandas
    #   pyarrow
openai==1.34.0
    # via
    #   llama-index-agent-openai
    #   llama-index-core
    #   llama-index-legacy
outlines==0.0.45
    # via -r notebooks/requirements_cpu.in
packaging==24.1
    # via
    #   datasets
    #   huggingface-hub
    #   marshmallow
pandas==2.2.2
    # via
    #   datasets
    #   llama-index-core
    #   llama-index-legacy
pillow==10.3.0
    # via llama-index-core
pyairports==2.1.1
    # via outlines
pyarrow==16.1.0
    # via datasets
pyarrow-hotfix==0.6
    # via datasets
pycountry==24.6.1
    # via outlines
pydantic==2.7.4
    # via
    #   llamaindex-py-client
    #   openai
    #   outlines
pydantic-core==2.18.4
    # via pydantic
pygments==2.18.0
    # via rich
pypdf==4.2.0
    # via llama-index-readers-file
python-dateutil==2.9.0.post0
    # via pandas
pytz==2024.1
    # via pandas
pyyaml==6.0.1
    # via
    #   datasets
    #   huggingface-hub
    #   llama-index-core
referencing==0.35.1
    # via
    #   jsonschema
    #   jsonschema-specifications
    #   outlines
regex==2024.5.15
    # via
    #   nltk
    #   tiktoken
requests==2.32.3
    # via
    #   datasets
    #   huggingface-hub
    #   llama-index-core
    #   llama-index-legacy
    #   outlines
    #   tiktoken
rich==13.7.1
    # via -r notebooks/requirements_cpu.in
rpds-py==0.18.1
    # via
    #   jsonschema
    #   referencing
six==1.16.0
    # via python-dateutil
sniffio==1.3.1
    # via
    #   anyio
    #   httpx
    #   openai
soupsieve==2.5
    # via beautifulsoup4
sqlalchemy==2.0.31
    # via
    #   llama-index-core
    #   llama-index-legacy
striprtf==0.0.26
    # via llama-index-readers-file
tenacity==8.3.0
    # via
    #   llama-index-core
    #   llama-index-legacy
tiktoken==0.7.0
    # via
    #   llama-index-core
    #   llama-index-legacy
tqdm==4.66.4
    # via
    #   datasets
    #   huggingface-hub
    #   llama-index-core
    #   nltk
    #   openai
    #   outlines
typing-extensions==4.12.2
    # via
    #   huggingface-hub
    #   llama-index-core
    #   llama-index-legacy
    #   openai
    #   outlines
    #   pydantic
    #   pydantic-core
    #   sqlalchemy
    #   typing-inspect
typing-inspect==0.9.0
    # via
    #   dataclasses-json
    #   llama-index-core
    #   llama-index-legacy
tzdata==2024.1
    # via pandas
urllib3==2.2.2
    # via requests
wrapt==1.16.0
    # via
    #   deprecated
    #   llama-index-core
xxhash==3.4.1
    # via datasets
yarl==1.9.4
    # via aiohttp