[tool.poetry]
name = "llama2-wrapper"
version = "0.1.14"
description = "Use llama2-wrapper as your local llama2 backend for Generative Agents / Apps"
authors = ["liltom-eth <liltom.eth@gmail.com>"]
license = "MIT"
homepage = "https://github.com/liltom-eth/llama2-webui"
repository = "https://github.com/liltom-eth/llama2-webui"
readme = "./docs/pypi.md"
packages = [{include = "llama2_wrapper"}]
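
# A minimal usage sketch once the wheel is installed (the LLAMA2_WRAPPER class and
# get_prompt helper are assumed here from docs/pypi.md, not defined in this file):
#
#   from llama2_wrapper import LLAMA2_WRAPPER, get_prompt
#
#   llama2 = LLAMA2_WRAPPER()   # default backend/model settings
#   print(llama2(get_prompt("Hi, do you know PyTorch?")))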

[tool.poetry.dependencies]
python = ">=3.10,<3.13"
accelerate = "^0.21.0"
auto-gptq = "0.3.0"
gradio = "3.37.0"
protobuf = "3.20.3"
scipy = "1.11.1"
sentencepiece = "0.1.99"
torch = "2.0.1"
transformers = "4.31.0"
tqdm = "4.65.0"
python-dotenv = "1.0.0"
llama-cpp-python = "0.2.11"
bitsandbytes = [
    { platform = "linux", version = "0.40.2" },
    { platform = "darwin", version = "0.40.2" },
]
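# Poetry's multiple-constraints syntax above pins bitsandbytes 0.40.2 on Linux and
# macOS only; on platforms matching neither marker the dependency is not installed.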
memory-profiler = "0.61.0"
huggingface-hub = "0.16.4"
fastapi = "0.100.0"
uvicorn = "0.23.1"
sse-starlette = "1.6.5"
pydantic = "2.2.1"
pydantic-settings = "2.0.3"
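# fastapi, uvicorn, sse-starlette, pydantic and pydantic-settings back the bundled
# OpenAI-compatible API server; per the upstream README it is started with
# `python -m llama2_wrapper.server` (entry point assumed from that README).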
pytest = "7.4.0"
black = "23.7.0"
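# pytest and black are development tools; with Poetry >= 1.2 they could instead live
# in a dedicated group, excludable via `poetry install --without dev`, e.g.:
#
#   [tool.poetry.group.dev.dependencies]
#   pytest = "7.4.0"
#   black = "23.7.0"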

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
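
# poetry-core is a PEP 517 backend, so the project builds with any standard frontend:
#
#   poetry build        # writes sdist + wheel to dist/
#   python -m build     # same, using the "build" package
#   pip install .       # install straight from source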

# Note: Poetry reads virtualenvs settings from poetry.toml (e.g. via
# `poetry config virtualenvs.in-project true --local`), not from pyproject.toml;
# mirror these values there for them to take effect.
[virtualenvs]
create = true
in-project = true