Install transformers-gradio with pre-requirements.txt
- pre-requirements.txt +1 -0
- pyproject.toml +0 -2
- requirements.txt +4 -102
- uv.lock +0 -0
pre-requirements.txt
ADDED
@@ -0,0 +1 @@
+git+https://github.com/AK391/transformers-gradio.git
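On Hugging Face Spaces, packages listed in pre-requirements.txt are installed before requirements.txt is processed, which is why the transformers-gradio git dependency can move here and be dropped from the compiled requirements below. A minimal sketch of the resulting build-time install order (assuming a standard pip-based Space build; these commands illustrate the builder's behavior and are not part of this commit):

pip install -r pre-requirements.txt
pip install -r requirements.txt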
pyproject.toml
CHANGED
@@ -23,7 +23,6 @@ dependencies = [
     "spaces>=0.30.4",
     "together-gradio>=0.0.1",
     "torch==2.4.0",
-    "transformers-gradio",
     "xai-gradio>=0.0.2",
 ]
 
@@ -35,7 +34,6 @@ nvidia-gradio = { git = "https://github.com/AK391/nvidia-gradio.git" }
 dashscope-gradio = { git = "https://github.com/AK391/dashscope-gradio.git" }
 fal-gradio = { git = "https://github.com/AK391/fal-gradio.git" }
 replicate-gradio = { git = "https://github.com/AK391/replicate-gradio.git" }
-transformers-gradio = { git = "https://github.com/AK391/transformers-gradio.git" }
 
 [tool.ruff]
 # Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
requirements.txt
CHANGED
@@ -1,17 +1,7 @@
 # This file was autogenerated by uv via the following command:
 #    uv pip compile pyproject.toml -o requirements.txt
-accelerate==1.1.1
-    # via transformers-gradio
 aiofiles==23.2.1
     # via gradio
-aiohappyeyeballs==2.4.3
-    # via aiohttp
-aiohttp==3.11.8
-    # via
-    #   datasets
-    #   fsspec
-aiosignal==1.3.1
-    # via aiohttp
 annotated-types==0.7.0
     # via pydantic
 anthropic==0.39.0
@@ -27,12 +17,6 @@ anyio==4.6.2.post1
     #   httpx
     #   openai
     #   starlette
-async-timeout==5.0.1
-    # via aiohttp
-attrs==24.2.0
-    # via aiohttp
-bitsandbytes==0.44.1
-    # via transformers-gradio
 cachetools==5.5.0
     # via google-auth
 cerebras-cloud-sdk==1.12.1
@@ -50,16 +34,8 @@ click==8.1.7
     # via
     #   typer
     #   uvicorn
-coloredlogs==15.0.1
-    # via optimum
 dashscope-gradio @ git+https://github.com/AK391/dashscope-gradio.git@3a8bab36087cbf5efcde17c695a10e0229784db4
     # via anychat (pyproject.toml)
-datasets==2.14.4
-    # via optimum
-dill==0.3.7
-    # via
-    #   datasets
-    #   multiprocess
 distro==1.9.0
     # via
     #   anthropic
@@ -82,17 +58,11 @@ filelock==3.16.1
     # via
     #   huggingface-hub
     #   torch
-    #   transformers
     #   triton
 fireworks-gradio @ git+https://github.com/AK391/fireworks-ai-gradio.git@b85f85bfe777a7ec953c8b996536e1d1cd47ae08
     # via anychat (pyproject.toml)
-frozenlist==1.5.0
-    # via
-    #   aiohttp
-    #   aiosignal
 fsspec==2024.10.0
     # via
-    #   datasets
     #   gradio-client
     #   huggingface-hub
     #   torch
@@ -141,7 +111,6 @@ gradio==5.7.0
     #   sambanova-gradio
     #   spaces
     #   together-gradio
-    #   transformers-gradio
     #   xai-gradio
 gradio-client==1.5.0
     # via gradio
@@ -182,15 +151,8 @@ httpx-sse==0.4.0
     # via fal-client
 huggingface-hub==0.26.2
     # via
-    #   accelerate
-    #   datasets
     #   gradio
     #   gradio-client
-    #   optimum
-    #   tokenizers
-    #   transformers
-humanfriendly==10.0
-    # via coloredlogs
 hyperbolic-gradio==0.0.4
     # via anychat (pyproject.toml)
 idna==3.10
@@ -198,7 +160,6 @@ idna==3.10
     #   anyio
     #   httpx
     #   requests
-    #   yarl
 jinja2==3.1.4
     # via
     #   gradio
@@ -223,25 +184,14 @@ mistralai==1.2.3
     # via mistral-gradio
 mpmath==1.3.0
     # via sympy
-multidict==6.1.0
-    # via
-    #   aiohttp
-    #   yarl
-multiprocess==0.70.15
-    # via datasets
 mypy-extensions==1.0.0
     # via typing-inspect
 networkx==3.4.2
     # via torch
 numpy==2.1.3
     # via
-    #   accelerate
-    #   bitsandbytes
-    #   datasets
     #   gradio
-    #   optimum
     #   pandas
-    #   transformers
 nvidia-cublas-cu12==12.1.3.1
     # via
     #   nvidia-cudnn-cu12
@@ -288,33 +238,21 @@ openai==1.55.0
     #   xai-gradio
 openai-gradio==0.0.4
     # via anychat (pyproject.toml)
-optimum==1.23.3
-    # via transformers-gradio
 orjson==3.10.11
     # via gradio
 packaging==24.2
     # via
-    #   accelerate
-    #   datasets
     #   gradio
     #   gradio-client
     #   huggingface-hub
-    #   optimum
     #   replicate
     #   spaces
-    #   transformers
 pandas==2.2.3
-    # via
-    #   datasets
-    #   gradio
+    # via gradio
 perplexity-gradio==0.0.1
     # via anychat (pyproject.toml)
 pillow==11.0.0
     # via gradio
-propcache==0.2.0
-    # via
-    #   aiohttp
-    #   yarl
 proto-plus==1.25.0
     # via
     #   google-ai-generativelanguage
@@ -328,11 +266,7 @@ protobuf==5.28.3
     #   grpcio-status
     #   proto-plus
 psutil==5.9.8
-    # via
-    #   accelerate
-    #   spaces
-pyarrow==18.1.0
-    # via datasets
+    # via spaces
 pyasn1==0.6.1
     # via
     #   pyasn1-modules
@@ -369,24 +303,17 @@ pytz==2024.2
     # via pandas
 pyyaml==6.0.2
     # via
-    #   accelerate
-    #   datasets
     #   gradio
     #   huggingface-hub
-    #   transformers
-regex==2024.11.6
-    # via transformers
 replicate==1.0.4
     # via replicate-gradio
 replicate-gradio @ git+https://github.com/AK391/replicate-gradio.git@f5ad513cc8e3e38a33e2f36906b61082bd96e3b0
     # via anychat (pyproject.toml)
 requests==2.32.3
     # via
-    #   datasets
     #   google-api-core
     #   huggingface-hub
     #   spaces
-    #   transformers
 rich==13.9.4
     # via typer
 rsa==4.9
@@ -395,10 +322,6 @@ ruff==0.7.4
     # via gradio
 safehttpx==0.1.1
     # via gradio
-safetensors==0.4.5
-    # via
-    #   accelerate
-    #   transformers
 sambanova-gradio==0.1.9
     # via anychat (pyproject.toml)
 semantic-version==2.10.0
@@ -422,34 +345,18 @@ starlette==0.41.3
     #   fastapi
     #   gradio
 sympy==1.13.1
-    # via
-    #   optimum
-    #   torch
+    # via torch
 together-gradio==0.0.1
     # via anychat (pyproject.toml)
-tokenizers==0.20.3
-    # via transformers
 tomlkit==0.12.0
     # via gradio
 torch==2.4.0
-    # via
-    #   anychat (pyproject.toml)
-    #   accelerate
-    #   bitsandbytes
-    #   optimum
+    # via anychat (pyproject.toml)
 tqdm==4.67.0
     # via
-    #   datasets
     #   google-generativeai
     #   huggingface-hub
     #   openai
-    #   transformers
-transformers @ git+https://github.com/huggingface/transformers.git@5523e38b553ff6c46b04d2376870fcd842feeecc
-    # via
-    #   optimum
-    #   transformers-gradio
-transformers-gradio @ git+https://github.com/AK391/transformers-gradio.git@3152ffa7e80712d7057d1acb0b32bfc408e7b2f5
-    # via anychat (pyproject.toml)
 triton==3.0.0
     # via torch
 typer==0.13.1
@@ -465,7 +372,6 @@ typing-extensions==4.12.2
     #   gradio-client
     #   groq
     #   huggingface-hub
-    #   multidict
     #   openai
     #   pydantic
     #   pydantic-core
@@ -490,7 +396,3 @@ websockets==12.0
     # via gradio-client
 xai-gradio==0.0.2
     # via anychat (pyproject.toml)
-xxhash==3.5.0
-    # via datasets
-yarl==1.18.0
-    # via aiohttp
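The requirements.txt changes follow mechanically from the pyproject.toml edit: with transformers-gradio no longer a project dependency, recompiling drops it together with its transitive dependencies (accelerate, bitsandbytes, datasets, optimum, transformers, and the aiohttp stack). If the pinned file needs to be regenerated the same way, the command recorded in its header should reproduce it (a sketch, assuming uv is available locally):

uv pip compile pyproject.toml -o requirements.txt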
uv.lock
CHANGED
The diff for this file is too large to render.
See raw diff