Update app.py
app.py
CHANGED
@@ -118,14 +118,16 @@ except Exception as e:
     claude_client = None
     openai_client = None
 
-
+# try_claude_api revised
 async def try_claude_api(system_message, claude_messages, timeout=15):
     if not claude_client:
         raise Exception("Claude client not initialized")
 
     try:
         start_time = time.time()
-
+
+        # use asyncio.wait_for() instead of asyncio.timeout()
+        async def stream_content():
         with claude_client.messages.stream(
             model="claude-3-5-sonnet-20241022",
             max_tokens=7800,
@@ -139,19 +141,24 @@ async def try_claude_api(system_message, claude_messages, timeout=15):
                 yield collected_content
                 await asyncio.sleep(0)
 
+        # set the timeout with asyncio.wait_for()
+        async for content in asyncio.wait_for(stream_content(), timeout):
+            yield content
+
     except asyncio.TimeoutError:
         raise TimeoutError(f"Claude API timeout after {timeout} seconds")
     except Exception as e:
         print(f"Claude API error: {str(e)}")
         raise
 
+# try_openai_api revised
 async def try_openai_api(openai_messages):
     if not openai_client:
         raise Exception("OpenAI client not initialized")
 
     try:
-
-            model="gpt-4",
+        response = await openai_client.chat.completions.create(
+            model="gpt-4",
             messages=openai_messages,
             stream=True,
             max_tokens=4096,
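The comment in the hunk above says the change swaps asyncio.timeout() for asyncio.wait_for(). Note, however, that asyncio.wait_for() expects an awaitable, not an async generator, so passing stream_content() to it directly raises a TypeError at runtime. Below is a minimal, hedged sketch of one way to get a per-chunk timeout instead; the helper name stream_with_timeout and the toy _ticks generator are illustrative only and are not part of app.py.

```python
import asyncio
from typing import AsyncIterator, TypeVar

T = TypeVar("T")

async def stream_with_timeout(source: AsyncIterator[T], timeout: float = 15) -> AsyncIterator[T]:
    """Re-yield items from `source`, failing if any single item takes longer than `timeout` seconds."""
    while True:
        try:
            # __anext__() returns an awaitable, which is what asyncio.wait_for() accepts
            item = await asyncio.wait_for(source.__anext__(), timeout)
        except StopAsyncIteration:
            break  # the underlying stream is exhausted
        yield item

async def _ticks() -> AsyncIterator[int]:
    # toy stream standing in for the Claude chunk generator
    for i in range(3):
        await asyncio.sleep(0.1)
        yield i

async def _demo() -> None:
    async for item in stream_with_timeout(_ticks(), timeout=1.0):
        print(item)

if __name__ == "__main__":
    asyncio.run(_demo())
```

Used as `async for content in stream_with_timeout(stream_content(), timeout)`, this bounds how long any single chunk may take rather than the whole response; a whole-response deadline would need a different structure (for example, checking against the start_time the function already records).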
@@ -159,7 +166,7 @@ async def try_openai_api(openai_messages):
         )
 
         collected_content = ""
-        async for chunk in
+        async for chunk in response:
             if chunk.choices[0].delta.content:
                 collected_content += chunk.choices[0].delta.content
                 yield collected_content
@@ -168,8 +175,6 @@ async def try_openai_api(openai_messages):
         print(f"OpenAI API error: {str(e)}")
         raise
 
-
-
 class Demo:
     def __init__(self):
         pass
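The try_openai_api hunks above move to awaiting `openai_client.chat.completions.create(..., stream=True)` and iterating the result with `async for chunk in response:`, which matches the async OpenAI client. A self-contained sketch of that pattern follows, assuming an openai>=1.x AsyncOpenAI client; the prompt and max_tokens value are placeholders, not values from app.py.

```python
import asyncio
from openai import AsyncOpenAI

async def main() -> None:
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
    stream = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
        max_tokens=64,
    )
    collected = ""
    async for chunk in stream:
        # each chunk carries an incremental delta; content may be None on some chunks
        if chunk.choices[0].delta.content:
            collected += chunk.choices[0].delta.content
    print(collected)

if __name__ == "__main__":
    asyncio.run(main())
```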
@@ -1775,60 +1780,101 @@ def create_main_interface():
         return None, gr.update(active_key="empty")
 
     demo = gr.Blocks(css="""
    [… 54 removed lines of the previous inline CSS are collapsed in this view …]
+        /* Main tab styles */
+        .main-tabs > div.tab-nav > button {
+            font-size: 1.1em !important;
+            padding: 0.5em 1em !important;
+            background: rgba(255, 255, 255, 0.8) !important;
+            border: none !important;
+            border-radius: 8px 8px 0 0 !important;
+            margin-right: 4px !important;
+        }
+        .main-tabs > div.tab-nav > button.selected {
+            background: linear-gradient(45deg, #0084ff, #00a3ff) !important;
+            color: white !important;
+        }
+        .main-tabs {
+            margin-top: -20px !important;
+            border-radius: 0 0 15px 15px !important;
+            box-shadow: 0 4px 15px rgba(0,0,0,0.1) !important;
+        }
+
+        /* MOUSE interface styles */
+        .left_header {
+            text-align: center;
+            margin-bottom: 20px;
+        }
+        .right_panel {
+            background: white;
+            border-radius: 15px;
+            padding: 20px;
+            box-shadow: 0 4px 15px rgba(0,0,0,0.1);
+            height: calc(100vh - 100px); /* adjust height */
+            min-height: 800px; /* minimum height */
+        }
+        .setting-buttons {
+            margin-bottom: 15px;
+        }
+        .render_header {
+            background: #f5f5f5;
+            padding: 10px;
+            border-radius: 8px;
+            margin-bottom: 15px;
+        }
+        .header_btn {
+            display: inline-block;
+            width: 12px;
+            height: 12px;
+            border-radius: 50%;
+            margin-right: 8px;
+            background: #ddd;
+        }
+        .html_content {
+            height: calc(100vh - 200px); /* adjust height */
+            min-height: 700px; /* minimum height */
+            border: 1px solid #eee;
+            border-radius: 8px;
+            overflow: hidden;
+        }
+
+        /* input area height */
+        .ant-input-textarea-large textarea {
+            height: 400px !important; /* taller input box */
+            min-height: 400px !important;
+        }
+
+        /* scrollbar styling */
+        .html_content::-webkit-scrollbar {
+            width: 8px;
+        }
+        .html_content::-webkit-scrollbar-track {
+            background: #f1f1f1;
+            border-radius: 4px;
+        }
+        .html_content::-webkit-scrollbar-thumb {
+            background: #888;
+            border-radius: 4px;
+        }
+        .html_content::-webkit-scrollbar-thumb:hover {
+            background: #555;
+        }
+
+        /* responsive height adjustments */
+        @media screen and (max-height: 900px) {
+            .right_panel {
+                height: calc(100vh - 80px);
+                min-height: 600px;
+            }
+            .html_content {
+                height: calc(100vh - 160px);
+                min-height: 500px;
+            }
+            .ant-input-textarea-large textarea {
+                height: 300px !important;
+                min-height: 300px !important;
+            }
+        }
+    """, theme=theme)
 
     with demo:
         with gr.Tabs(elem_classes="main-tabs") as tabs:
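The new CSS targets classes such as .main-tabs and .html_content, which only take effect on components that declare matching elem_classes, as `gr.Tabs(elem_classes="main-tabs")` does in the context line above. A stripped-down sketch of that wiring, assuming Gradio 4.x; the tab label, the gr.HTML component, and the omission of theme= are illustrative choices, not the full app.py layout.

```python
import gradio as gr

CSS = """
.main-tabs > div.tab-nav > button { font-size: 1.1em !important; }
.html_content { min-height: 700px; border: 1px solid #eee; }
"""

with gr.Blocks(css=CSS) as demo:
    with gr.Tabs(elem_classes="main-tabs"):
        with gr.Tab("Render"):
            # elem_classes attaches the CSS class to this component's wrapper element
            gr.HTML("<p>preview</p>", elem_classes="html_content")

if __name__ == "__main__":
    demo.launch()
```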