Médéric Hurier (Fmind) committed
Commit: 03cbf3b
1 Parent(s): 6f513df

updated application
- .python-version +1 -1
- app.py +32 -23
- gradio_cached_examples/13/indices.csv +1 -0
- gradio_cached_examples/13/log.csv +2 -0
- pyproject.toml +2 -2
- tasks/cleans.py +2 -1
.python-version
CHANGED
@@ -1 +1 @@
-3.
+3.11
app.py
CHANGED
@@ -3,19 +3,22 @@
 # %% IMPORTS

 import logging
-import os

 import gradio as gr
 from openai import OpenAI
+from openai.types.chat import (
+    ChatCompletionAssistantMessageParam,
+    ChatCompletionMessageParam,
+    ChatCompletionSystemMessageParam,
+    ChatCompletionUserMessageParam,
+)

 # %% CONFIGS

 # %% - Models

 MODEL_NAME = "gpt-3.5-turbo"
-MODEL_STREAM = True
 MODEL_TEMPERATURE = 0.0
-MODEL_API_KEY = os.environ["OPENAI_API_KEY"]

 # %% - Prompts

@@ -29,9 +32,9 @@ PROMPT_SYSTEM = PROMPT_INSTRUCTIONS + PROMPT_CONTEXT

 # %% - Interfaces

-
-
-
+INTERFACE_THEME = "base"
+INTERFACE_TITLE = "Fmind AI Assistant"
+INTERFACE_EXAMPLES = [
     "Who is Médéric Hurier (Fmind)?",
     "Is Fmind open to new opportunities?",
     "Can you share details about Médéric PhD?",
@@ -39,17 +42,19 @@ EXAMPLES = [
     "Describe his proficiency with Python programming",
     "What is the answer to life, the universe, and everything?",
 ]
-
+INTERFACE_DESCRIPTION = (
     "<center>"
     "Visit my website: <a href='https://fmind.dev'>https://fmind.dev</a>"
     " - Médéric HURIER (Fmind)"
-    " - Freelancer: AI/FM/MLOps Engineer | Data Scientist | MLOps Community Organizer |
+    " - Freelancer: AI/FM/MLOps Engineer | Data Scientist | MLOps Community Organizer | MLflow Ambassador | Hacker | PhD"
     "</center>"
 )
+INTERFACE_CACHE_EXAMPLES = "lazy"
+INTERFACE_CONCURRENCY_LIMIT = None

 # %% CLIENTS

-client = OpenAI(
+client = OpenAI()

 # %% LOGGING

@@ -63,23 +68,25 @@ logging.basicConfig(

 def answer(message: str, history: list[tuple[str, str]]) -> str:
     """Answer questions about my resume."""
-    #
-    messages = []
-    messages += [
+    # messages
+    messages: list[ChatCompletionMessageParam] = []
+    messages += [ChatCompletionSystemMessageParam(role="system", content=PROMPT_SYSTEM)]
     for user, assistant in history:
-        messages += [
-        messages += [
-    messages += [
-    #
-    # - response
+        messages += [ChatCompletionUserMessageParam(role="user", content=user)]
+        messages += [ChatCompletionAssistantMessageParam(role="assistant", content=assistant)]
+    messages += [ChatCompletionUserMessageParam(role="user", content=message)]
+    # response
     response = client.chat.completions.create(
         model=MODEL_NAME,
         messages=messages,
         temperature=MODEL_TEMPERATURE,
     )
-
+    logging.info("Response: %s", response.usage)
+    # content
     content = response.choices[0].message.content
-
+    if content is None:
+        logging.warning("Response content is None: %s", response)
+        return "[Internal Error] Sorry, I don't have an answer for that."
     return content


@@ -87,10 +94,12 @@ def answer(message: str, history: list[tuple[str, str]]) -> str:

 interface = gr.ChatInterface(
     fn=answer,
-    theme=
-    title=
-    examples=
-    description=
+    theme=INTERFACE_THEME,
+    title=INTERFACE_TITLE,
+    examples=INTERFACE_EXAMPLES,
+    description=INTERFACE_DESCRIPTION,
+    cache_examples=INTERFACE_CACHE_EXAMPLES,
+    concurrency_limit=INTERFACE_CONCURRENCY_LIMIT,
     clear_btn=None,
     retry_btn=None,
     undo_btn=None,
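Note: the sketch below shows how the updated answer() function and the gr.ChatInterface object could be exercised locally. It is an assumed usage example, not part of this commit: the import path (app), the sample history, and the launch() call are illustrative, and OPENAI_API_KEY must be exported since client = OpenAI() now reads the key from the environment implicitly.

# Assumed local smoke test for the updated app.py (not part of this commit).
# Importing app executes its module-level code (OpenAI client, Gradio interface),
# so export OPENAI_API_KEY beforehand.
from app import answer, interface

# history uses the (user, assistant) tuple pairs that answer() iterates over.
history = [("Who is Fmind?", "Médéric Hurier, a freelance MLOps engineer.")]
print(answer("Is Fmind open to new opportunities?", history))

# gr.ChatInterface is a Gradio Blocks app, so it can be served locally with launch().
interface.launch()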
gradio_cached_examples/13/indices.csv
ADDED
@@ -0,0 +1 @@
+2
gradio_cached_examples/13/log.csv
ADDED
@@ -0,0 +1,2 @@
+Chatbot,flag,username,timestamp
+"[[""Can you share details about M\u00e9d\u00e9ric PhD?"", ""### Doctor of Philosophy (PhD) in computer security and artificial intelligence\n\n- **School:** University of Luxembourg\n- **Location:** Luxembourg\n- **Grade:** Very Good\n- **Period:** 2015 - 2019\n- **Thesis Title:** Creating better ground truth to further understand Android malware\n\n### Publications related to PhD:\n\n1. **Creating better ground truth to further understand Android malware:**\n - Publisher: University of Luxembourg\n - Publication date: July 1, 2019\n - Link: [Read more](https://orbilu.uni.lu/handle/10993/39903)\n\n2. **Euphony: Harmonious Unification of Cacophonous Anti-Virus Vendor Labels for Android Malware:**\n - Publisher: MSR 2017\n - Publication date: May 21, 2017\n - Link: [Read more](https://orbilu.uni.lu/handle/10993/31441)\n\n3. **On the Lack of Consensus in Anti-Virus Decisions: Metrics and Insights on Building Ground Truths of Android Malware:**\n - Publisher: DIMVA 2016\n - Publication date: July 7, 2016\n - Link: [Read more](https://orbilu.uni.lu/handle/10993/27845)\n\nThese publications showcase M\u00e9d\u00e9ric's research contributions in the field of computer security and artificial intelligence during his PhD studies.""]]",,,2024-05-11 18:44:36.060908
pyproject.toml
CHANGED
@@ -1,9 +1,9 @@
 [tool.mypy]
-python_version = "3.
+python_version = "3.11"
 ignore_missing_imports = true

 [tool.ruff]
 fix = true
 indent-width = 4
 line-length = 100
-target-version = "
+target-version = "py311"
tasks/cleans.py
CHANGED
@@ -29,6 +29,7 @@ def ruff(ctx: Context) -> None:
 @task
 def python(ctx: Context) -> None:
     """Clean python files and folders."""
+    ctx.run("find . -type f -name '*.py[co]' -delete")
     ctx.run("find . -type d -name __pycache__ -delete")


@@ -37,6 +38,6 @@ def all(_: Context) -> None:
     """Run all clean tasks."""


-@task(pre=[
+@task(pre=[all, install])
 def reset(_: Context) -> None:
     """Reset the project state."""
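Note: a minimal sketch of how the updated clean task could be driven programmatically with invoke; the tasks package import and the call pattern are assumptions for illustration, since only tasks/cleans.py appears in this commit (these tasks are normally run through the invoke CLI).

# Assumed programmatic invocation of the clean task (not part of this commit).
from invoke.context import Context

from tasks import cleans  # hypothetical package layout implied by tasks/cleans.py

ctx = Context()
cleans.python(ctx)  # removes *.py[co] files (new in this commit) and __pycache__ directories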