# fire_stang3 / pyproject.toml

[build-system]
requires = ["setuptools >= 75.0"]
build-backend = "setuptools.build_meta"

[project]
name = "flux-schnell-edge-inference"
description = "An edge-maxxing model submission by RobertML for the 4090 Flux contest"
requires-python = ">=3.10,<3.13"
version = "8"
dependencies = [
"diffusers==0.31.0",
"transformers==4.46.2",
"accelerate==1.1.0",
"omegaconf==2.3.0",
"torch==2.6.0",
"protobuf==5.28.3",
"sentencepiece==0.2.0",
"edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@7c760ac54f6052803dadb3ade8ebfc9679a94589#subdirectory=pipelines",
"gitpython>=3.1.43",
"hf_transfer==0.1.8",
"torchao==0.6.1",
"setuptools>=75.3.0",
"para-attn==0.3.15",
"git-lfs<=1.6"
]
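
# Notes on the pinned dependencies:
# - edge-maxxing-pipelines is installed from an exact git commit via pip's
#   direct-reference syntax ("name @ git+URL#subdirectory=..."), which keeps the
#   submission environment reproducible.
# - hf_transfer only speeds up Hugging Face Hub downloads when
#   HF_HUB_ENABLE_HF_TRANSFER=1 is set in the environment.

# Model repositories are pinned to exact revisions below; the edge-maxxing tooling
# presumably pre-downloads them before inference starts.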
[[tool.edge-maxxing.models]]
repository = "black-forest-labs/FLUX.1-schnell"
revision = "741f7c3ce8b383c54771c7003378a50191e9efe9"
exclude = ["transformer"]
[[tool.edge-maxxing.models]]
repository = "manbeast3b/flux.1-schnell-full1"
revision = "cb1b599b0d712b9aab2c4df3ad27b050a27ec146"
[[tool.edge-maxxing.models]]
repository = "city96/t5-v1_1-xxl-encoder-bf16"
revision = "1b9c856aadb864af93c1dcdc226c2774fa67bc86"
[[tool.edge-maxxing.models]]
repository = "manbeast3b/FLUX.1-schnell-taef1-float8"
revision = "7c538d53ec698509788ed88b1305c6bb019bdb4d"
[[tool.edge-maxxing.models]]
repository = "RobertML/FLUX.1-schnell-vae_e3m2"
revision = "da0d2cd7815792fb40d084dbd8ed32b63f153d8d"

[project.scripts]
start_inference = "main:main"
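
# The start_inference console script resolves "main:main" to the main() function in a
# top-level main.py module of this submission. A minimal sketch of such an entry point,
# assuming the pipeline-loading details (main.py itself is not part of this file):
#
#     def main() -> None:
#         # load the FLUX.1-schnell pipeline and run the contest's inference loop
#         ...
#
#     if __name__ == "__main__":
#         main()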