freddyaboulton (HF staff) committed
Commit: 72c7dcc
Parent: 59b86c6

Upload folder using huggingface_hub

lazy_caching_examples_testcase.py ADDED
@@ -0,0 +1,27 @@
+ import gradio as gr
+
+ def generate(
+     message: str,
+     chat_history: list[dict],
+ ):
+
+     output = ""
+     for character in message:
+         output += character
+         yield output
+
+
+ demo = gr.ChatInterface(
+     fn=generate,
+     examples=[
+         ["Hey"],
+         ["Can you explain briefly to me what is the Python programming language?"],
+     ],
+     cache_examples=True,
+     cache_mode="lazy",
+     type="messages",
+ )
+
+
+ if __name__ == "__main__":
+     demo.launch()
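
Note on the new test case: with cache_examples=True and cache_mode="lazy", Gradio caches each example's output the first time that example is selected, instead of pre-computing every example when the app launches. As a rough, hypothetical smoke test (not part of this commit), the running demo could be queried with the pinned gradio-client; the local URL and the "/chat" endpoint name below are assumptions based on ChatInterface defaults.

# Hypothetical smoke test for the demo above (not part of this commit).
# Assumes the app is already running locally on Gradio's default port and
# that the ChatInterface endpoint is exposed as "/chat".
from gradio_client import Client

client = Client("http://127.0.0.1:7860/")

# predict() on a streaming endpoint waits for the generator to finish and
# returns the final yielded value, i.e. the fully echoed message here.
result = client.predict("Hey", api_name="/chat")
print(result)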
requirements.txt CHANGED
@@ -1,2 +1,2 @@
- gradio-client @ git+https://github.com/gradio-app/gradio@369a44e7f8c829b3c0e21830d3c2e92b89b4c243#subdirectory=client/python
- https://gradio-pypi-previews.s3.amazonaws.com/369a44e7f8c829b3c0e21830d3c2e92b89b4c243/gradio-5.6.0-py3-none-any.whl
+ gradio-client @ git+https://github.com/gradio-app/gradio@db162bfc5907d16b437443fef3c6972065cfa836#subdirectory=client/python
+ https://gradio-pypi-previews.s3.amazonaws.com/db162bfc5907d16b437443fef3c6972065cfa836/gradio-5.6.0-py3-none-any.whl
run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: test_chatinterface_examples"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/eager_caching_examples_testcase.py\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/multimodal_messages_examples_testcase.py\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/multimodal_tuples_examples_testcase.py\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/tuples_examples_testcase.py"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "def generate(\n", "    message: str,\n", "    chat_history: list[dict],\n", "):\n", "\n", "    output = \"\"\n", "    for character in message:\n", "        output += character\n", "        yield output\n", "\n", "\n", "demo = gr.ChatInterface(\n", "    fn=generate,\n", "    examples=[\n", "        [\"Hey\"],\n", "        [\"Can you explain briefly to me what is the Python programming language?\"],\n", "    ],\n", "    cache_examples=False,\n", "    type=\"messages\",\n", ")\n", "\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: test_chatinterface_examples"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/eager_caching_examples_testcase.py\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/lazy_caching_examples_testcase.py\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/multimodal_messages_examples_testcase.py\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/multimodal_tuples_examples_testcase.py\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/test_chatinterface_examples/tuples_examples_testcase.py"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "def generate(\n", "    message: str,\n", "    chat_history: list[dict],\n", "):\n", "\n", "    output = \"\"\n", "    for character in message:\n", "        output += character\n", "        yield output\n", "\n", "\n", "demo = gr.ChatInterface(\n", "    fn=generate,\n", "    examples=[\n", "        [\"Hey\"],\n", "        [\"Can you explain briefly to me what is the Python programming language?\"],\n", "    ],\n", "    cache_examples=False,\n", "    type=\"messages\",\n", ")\n", "\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}