Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- .env +5 -0
- .env.template +4 -0
- .gitattributes +49 -0
- README.md +3 -9
- __init__.py +0 -0
- __pycache__/chatbot.cpython-311.pyc +0 -0
- __pycache__/constants.cpython-311.pyc +0 -0
- __pycache__/environments.cpython-311.pyc +0 -0
- __pycache__/qdrant.cpython-311.pyc +0 -0
- __pycache__/service_provider_config.cpython-311.pyc +0 -0
- chatbot.py +185 -0
- constants.py +46 -0
- environments.py +10 -0
- gradio-app.py +148 -0
- logs/chatbot.log +53 -0
- qdrant.py +8 -0
- requirements.txt +9 -0
- service_provider_config.py +10 -0
- venv-kip-llm/bin/Activate.ps1 +247 -0
- venv-kip-llm/bin/activate +69 -0
- venv-kip-llm/bin/activate.csh +26 -0
- venv-kip-llm/bin/activate.fish +69 -0
- venv-kip-llm/bin/chardetect +8 -0
- venv-kip-llm/bin/cygdb +8 -0
- venv-kip-llm/bin/cython +8 -0
- venv-kip-llm/bin/cythonize +8 -0
- venv-kip-llm/bin/distro +8 -0
- venv-kip-llm/bin/dotenv +8 -0
- venv-kip-llm/bin/f2py +8 -0
- venv-kip-llm/bin/filetype +8 -0
- venv-kip-llm/bin/fonttools +8 -0
- venv-kip-llm/bin/gradio +8 -0
- venv-kip-llm/bin/httpx +8 -0
- venv-kip-llm/bin/huggingface-cli +8 -0
- venv-kip-llm/bin/jsonschema +8 -0
- venv-kip-llm/bin/llamaindex-cli +8 -0
- venv-kip-llm/bin/markdown-it +8 -0
- venv-kip-llm/bin/nltk +8 -0
- venv-kip-llm/bin/normalizer +8 -0
- venv-kip-llm/bin/numba +8 -0
- venv-kip-llm/bin/openai +8 -0
- venv-kip-llm/bin/pip +8 -0
- venv-kip-llm/bin/pip3 +8 -0
- venv-kip-llm/bin/pip3.11 +8 -0
- venv-kip-llm/bin/pyftmerge +8 -0
- venv-kip-llm/bin/pyftsubset +8 -0
- venv-kip-llm/bin/pygmentize +8 -0
- venv-kip-llm/bin/python +0 -0
- venv-kip-llm/bin/python3 +0 -0
- venv-kip-llm/bin/python3.11 +0 -0
.env
ADDED
@@ -0,0 +1,5 @@
+OPENAI_API_KEY=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
+QDRANT_URL=https://418ab8e7-b634-4abf-bed8-63d5b3567b18.us-east4-0.gcp.cloud.qdrant.io:6333
+QDRANT_API_KEY=pq7_G7HmodTpipF7dtpuY8CZcCL6YPsuyWBlUUReLXX9iksQs3brFw
+QDRANT_COLLECTION_NAME=T1G0CE4N7
+OPENAI_LOG=debug
.env.template
ADDED
@@ -0,0 +1,4 @@
+QDRANT_URL=
+QDRANT_API_KEY=
+OPENAI_API_KEY=
+QDRANT_COLLECTION_NAME=
.gitattributes
CHANGED
@@ -33,3 +33,52 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/bin/ruff filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/PIL/.dylibs/libfreetype.6.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/PIL/.dylibs/libharfbuzz.0.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/altair/vegalite/v5/schema/__pycache__/channels.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/altair/vegalite/v5/schema/__pycache__/core.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/emoji/unicode_codes/__pycache__/data_dict.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/fontTools/misc/bezierTools.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/gradio/templates/frontend/assets/Index-75821454.js.map filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/grpc/_cython/cygrpc.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/grpc_tools/_protoc_compiler.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/llvmlite/binding/libllvmlite.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/lxml/etree.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/lxml/objectify.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/numpy/.dylibs/libgfortran.5.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/numpy/.dylibs/libopenblas64_.0.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/numpy/core/_multiarray_umath.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pandas/_libs/algos.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pandas/_libs/groupby.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pandas/_libs/hashtable.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pandas/_libs/interval.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pandas/_libs/join.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pandas/_libs/tslibs/offsets.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/_compute.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/_dataset.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/_flight.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/lib.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/libarrow.1500.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/libarrow_acero.1500.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/libarrow_dataset.1500.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/libarrow_flight.1500.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/libarrow_python.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/libarrow_substrait.1500.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pyarrow/libparquet.1500.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/pydantic_core/_pydantic_core.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/rapidfuzz/distance/metrics_cpp.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/rapidfuzz/fuzz_cpp.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/rpds/rpds.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/.dylibs/libgfortran.5.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/.dylibs/libopenblas.0.dylib filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/io/_fast_matrix_market/_fmm_core.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/linalg/_flapack.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/misc/face.dat filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/optimize/_highs/_highs_wrapper.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/sparse/_sparsetools.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/special/_ufuncs.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/special/cython_special.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/scipy/stats/_unuran/unuran_wrapper.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/sklearn/_loss/_loss.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
+venv-kip-llm/lib/python3.11/site-packages/tiktoken/_tiktoken.cpython-311-darwin.so filter=lfs diff=lfs merge=lfs -text
README.md
CHANGED
@@ -1,12 +1,6 @@
 ---
-title:
-
-colorFrom: red
-colorTo: purple
+title: kipwise-llm
+app_file: gradio-app.py
 sdk: gradio
-sdk_version: 4.
-app_file: app.py
-pinned: false
+sdk_version: 4.15.0
 ---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
__init__.py
ADDED
File without changes
__pycache__/chatbot.cpython-311.pyc
ADDED
Binary file (10.7 kB)

__pycache__/constants.cpython-311.pyc
ADDED
Binary file (1.77 kB)

__pycache__/environments.cpython-311.pyc
ADDED
Binary file (576 Bytes)

__pycache__/qdrant.cpython-311.pyc
ADDED
Binary file (400 Bytes)

__pycache__/service_provider_config.cpython-311.pyc
ADDED
Binary file (746 Bytes)
chatbot.py
ADDED
@@ -0,0 +1,185 @@
+import logging
+import os
+import re
+from typing import List
+
+import llama_index
+import phoenix as px
+from llama_index.llms import ChatMessage, MessageRole
+from openai import OpenAI
+
+from environments import OPENAI_API_KEY
+
+
+class IndexBuilder:
+    def __init__(self, vdb_collection_name, embed_model, is_load_from_vector_store=False):
+        self.documents = None
+        self.vdb_collection_name = vdb_collection_name
+        self.embed_model = embed_model
+        self.index = None
+        self.is_load_from_vector_store = is_load_from_vector_store
+        self.build_index()
+
+    def _load_doucments(self):
+        pass
+
+    def _setup_service_context(self):
+        print("Using global service context...")
+
+    def _setup_vector_store(self):
+        print("Setup vector store...")
+
+    def _setup_index(self):
+        if not self.is_load_from_vector_store and self.documents is None:
+            raise ValueError("No documents provided for index building.")
+        print("Building Index")
+
+    def build_index(self):
+        if self.is_load_from_vector_store:
+            self._setup_service_context()
+            self._setup_vector_store()
+            self._setup_index()
+            return
+        self._load_doucments()
+        self._setup_service_context()
+        self._setup_vector_store()
+        self._setup_index()
+
+
+class Chatbot:
+    SYSTEM_PROMPT = ""
+    DENIED_ANSWER_PROMPT = ""
+    CHAT_EXAMPLES = []
+
+    def __init__(self, model_name, index_builder: IndexBuilder, llm=None):
+        self.model_name = model_name
+        self.index_builder = index_builder
+        self.llm = llm
+
+        self.documents = None
+        self.index = None
+        self.chat_engine = None
+        self.service_context = None
+        self.vector_store = None
+        self.tools = None
+
+        self._setup_logger()
+        self._setup_chatbot()
+
+    def _setup_logger(self):
+        logs_dir = 'logs'
+        if not os.path.exists(logs_dir):
+            os.makedirs(logs_dir)  # Step 3: Create logs directory
+
+        logging.basicConfig(
+            filename=os.path.join(logs_dir, 'chatbot.log'),
+            filemode='a',
+            format='%(asctime)s - %(levelname)s - %(message)s',
+            level=logging.INFO
+        )
+        self.logger = logging.getLogger(__name__)
+
+    def _setup_chatbot(self):
+        # self._setup_observer()
+        self._setup_index()
+        self._setup_query_engine()
+        self._setup_tools()
+        self._setup_chat_engine()
+
+    def _setup_observer(self):
+        px.launch_app()
+        llama_index.set_global_handler("arize_phoenix")
+
+    def _setup_index(self):
+        self.index = self.index_builder.index
+        print("Inherited index builder")
+
+    def _setup_query_engine(self):
+        if self.index is None:
+            raise ValueError("No index built")
+        pass
+        print("Setup query engine...")
+
+    def _setup_tools(self):
+        pass
+        print("Setup tools...")
+
+    def _setup_chat_engine(self):
+        if self.index is None:
+            raise ValueError("No index built")
+        pass
+        print("Setup chat engine...")
+
+    def stream_chat(self, message, history):
+        self.logger.info(history)
+        self.logger.info(self.convert_to_chat_messages(history))
+        response = self.chat_engine.stream_chat(
+            message, chat_history=self.convert_to_chat_messages(history)
+        )
+        # Stream tokens as they are generated
+        partial_message = ""
+        for token in response.response_gen:
+            partial_message += token
+            yield partial_message
+
+        references = {}
+        for source in response.source_nodes:
+            if source.score < 0.76:
+                continue
+            url = source.node.metadata.get('url')
+            title = source.node.metadata.get('title')
+            if url and title:
+                references[url] = title
+        if references:
+            partial_message = partial_message + "\n \n\n---\n\nSources: \n"
+            for url, title in references.items():
+                partial_message = partial_message + f"- [{title}]({url})\n"
+            yield partial_message
+        return partial_message
+
+    def convert_to_chat_messages(self, history: List[List[str]]) -> List[ChatMessage]:
+        chat_messages = [ChatMessage(
+            role=MessageRole.SYSTEM, content=self.SYSTEM_PROMPT)]
+        for conversation in history[-3:]:
+            for index, message in enumerate(conversation):
+                role = MessageRole.USER if index % 2 == 0 else MessageRole.ASSISTANT
+                clean_message = re.sub(
+                    r"\n \n\n---\n\nSources: \n.*$", "", message, flags=re.DOTALL)
+                chat_messages.append(ChatMessage(
+                    role=role, content=clean_message.strip()))
+        return chat_messages
+
+    def predict_with_rag(self, message, history):
+        return self.stream_chat(message, history)
+
+    # Vanilla chatgpt methods, shared across all chatbot instance
+    def _invoke_chatgpt(self, history, message, is_include_system_prompt=False):
+        openai_client = OpenAI(api_key=OPENAI_API_KEY)
+        history_openai_format = []
+        if is_include_system_prompt:
+            history_openai_format.append(
+                {"role": "system", "content": self.SYSTEM_PROMPT})
+        for human, assistant in history:
+            history_openai_format.append({"role": "user", "content": human})
+            history_openai_format.append(
+                {"role": "assistant", "content": assistant})
+        history_openai_format.append({"role": "user", "content": message})
+
+        stream = openai_client.chat.completions.create(
+            model=self.model_name,
+            messages=history_openai_format,
+            temperature=1.0,
+            stream=True)
+
+        partial_message = ""
+        for part in stream:
+            partial_message += part.choices[0].delta.content or ""
+            yield partial_message
+
+    # For 'With Prompt Wrapper' - Add system prompt, no Pinecone
+    def predict_with_prompt_wrapper(self, message, history):
+        yield from self._invoke_chatgpt(history, message, is_include_system_prompt=True)
+
+    # For 'Vanilla ChatGPT' - No system prompt
+    def predict_vanilla_chatgpt(self, message, history):
+        yield from self._invoke_chatgpt(history, message)
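Note: a minimal sketch of how `Chatbot.stream_chat` is consumed outside of Gradio, not part of this commit. It assumes a hypothetical, fully initialized instance `bot` (one of the subclasses defined in gradio-app.py) and the Gradio-style history of `[user, assistant]` pairs that `convert_to_chat_messages` expects:

    # Hypothetical usage sketch (not in this commit); `bot` stands for an
    # already-constructed Chatbot subclass with a working chat engine.
    history = [
        ["How do I reset my password?", "Go to Settings > Security and click Reset."],
    ]
    final_answer = ""
    for partial in bot.stream_chat("Where do I find the security settings?", history):
        final_answer = partial  # each yield is the full message so far, not a delta
    print(final_answer)  # ends with a "Sources:" footer when high-score nodes carry url/title metadata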
constants.py
ADDED
@@ -0,0 +1,46 @@
+from enum import Enum
+
+from llama_index.core.llms.types import MessageRole
+from llama_index.llms.base import ChatMessage
+from llama_index.prompts.base import ChatPromptTemplate
+
+
+class ChatbotVersion(str, Enum):
+    CHATGPT_35 = "gpt-3.5-turbo-1106"
+    CHATGPT_4 = "gpt-4-1106-preview"
+
+
+class ServiceProvider(str, Enum):
+    OPENAI = "openai"
+    AZURE = "azure"
+
+
+CHUNK_SIZE = 8191
+IS_LOAD_FROM_VECTOR_STORE = True
+DEFAULT_MODEL = ChatbotVersion.CHATGPT_35
+
+TEXT_QA_SYSTEM_PROMPT = ChatMessage(
+    content=(
+        "You are an AI Assistant for Kipwise Knowledge Base."
+    ),
+    role=MessageRole.SYSTEM,
+)
+
+TEXT_QA_PROMPT_TMPL_MSGS = [
+    TEXT_QA_SYSTEM_PROMPT,
+    ChatMessage(
+        content=(
+            "Context information is below.\n"
+            "---------------------\n"
+            "{context_str}\n"
+            "---------------------\n"
+            "Given the context information and not prior knowledge, "
+            "answer the query in a warm, approachable manner, ensuring clarity and precision.\n"
+            "Query: {query_str}\n"
+            "Answer: "
+        ),
+        role=MessageRole.USER,
+    ),
+]
+
+CHAT_TEXT_QA_PROMPT = ChatPromptTemplate(message_templates=TEXT_QA_PROMPT_TMPL_MSGS)
environments.py
ADDED
@@ -0,0 +1,10 @@
+import os
+
+from dotenv import load_dotenv
+
+load_dotenv()
+
+OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
+QDRANT_URL = os.getenv('QDRANT_URL')
+QDRANT_API_KEY = os.getenv('QDRANT_API_KEY')
+QDRANT_COLLECTION_NAME = os.getenv('QDRANT_COLLECTION_NAME')
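Note: `os.getenv` silently returns `None` for anything missing from `.env`, so a small guard can surface configuration mistakes before the app tries to build an index. A minimal sketch, not part of this commit, assuming the same variable names as above:

    # Hypothetical fail-fast check (not in this commit).
    import os

    REQUIRED = ("OPENAI_API_KEY", "QDRANT_URL", "QDRANT_API_KEY", "QDRANT_COLLECTION_NAME")
    missing = [name for name in REQUIRED if not os.getenv(name)]
    if missing:
        raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")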
gradio-app.py
ADDED
@@ -0,0 +1,148 @@
+import gradio as gr
+import llama_index
+import openai
+import phoenix as px
+from llama_index import ServiceContext, VectorStoreIndex
+from llama_index import set_global_service_context
+from llama_index.agent import OpenAIAgent
+from llama_index.chat_engine.types import ChatMode
+from llama_index.ingestion import IngestionPipeline
+from llama_index.tools import QueryEngineTool
+from llama_index.vector_stores.qdrant import QdrantVectorStore
+
+from chatbot import Chatbot, IndexBuilder
+from constants import CHAT_TEXT_QA_PROMPT, TEXT_QA_SYSTEM_PROMPT, CHUNK_SIZE, DEFAULT_MODEL, \
+    IS_LOAD_FROM_VECTOR_STORE
+from environments import OPENAI_API_KEY, QDRANT_COLLECTION_NAME
+from qdrant import client as qdrant_client
+from service_provider_config import get_service_provider_config
+
+px.launch_app()
+llama_index.set_global_handler("arize_phoenix")
+openai.api_key = OPENAI_API_KEY
+
+llm, embedding_model = get_service_provider_config(model_name=DEFAULT_MODEL)
+service_context = ServiceContext.from_defaults(
+    chunk_size=CHUNK_SIZE,
+    llm=llm,
+    embed_model=embedding_model,
+)
+set_global_service_context(service_context)
+
+
+class KipIndexBuilder(IndexBuilder):
+    def _load_documents(self):
+        # TODO: implement logic to import documents into qdrant - API feeding logic to consider
+        pass
+
+    def _setup_service_context(self):
+        super()._setup_service_context()
+
+    def _setup_vector_store(self):
+        self.vector_store = QdrantVectorStore(
+            client=qdrant_client, collection_name=self.vdb_collection_name)
+        super()._setup_vector_store()
+
+    def _setup_index(self):
+        super()._setup_index()
+        if self.is_load_from_vector_store:
+            self.index = VectorStoreIndex.from_vector_store(self.vector_store)
+            print("set up index from vector store")
+            return
+        pipeline = IngestionPipeline(
+            transformations=[
+                self.embed_model,
+            ],
+            vector_store=self.vector_store,
+        )
+        pipeline.run(documents=self.documents, show_progress=True)
+        self.index = VectorStoreIndex.from_vector_store(self.vector_store)
+
+
+class KipToolChatbot(Chatbot):
+    DENIED_ANSWER_PROMPT = ""
+    SYSTEM_PROMPT = ""
+    CHAT_EXAMPLES = []
+
+    def _setup_observer(self):
+        pass
+
+    def _setup_index(self):
+        super()._setup_index()
+
+    def _setup_query_engine(self):
+        super()._setup_query_engine()
+        self.query_engine = self.index.as_query_engine(
+            text_qa_template=CHAT_TEXT_QA_PROMPT)
+
+    def _setup_tools(self):
+        super()._setup_tools()
+        self.tools = QueryEngineTool.from_defaults(
+            query_engine=self.query_engine)
+
+    def _setup_chat_engine(self):
+        super()._setup_chat_engine()
+        self.chat_engine = OpenAIAgent.from_tools(
+            tools=[self.tools],
+            llm=llm,
+            similarity_top_k=1,
+            verbose=True
+        )
+
+
+class KipContextChatbot(KipToolChatbot):
+    def _setup_chat_engine(self):
+        self.chat_engine = self.index.as_chat_engine(
+            chat_mode=ChatMode.CONTEXT,
+            similarity_top_k=5,
+            system_prompt=TEXT_QA_SYSTEM_PROMPT.content,
+            text_qa_template=CHAT_TEXT_QA_PROMPT)
+
+
+class KipSimpleChatbot(KipToolChatbot):
+    def _setup_chat_engine(self):
+        self.chat_engine = self.index.as_chat_engine(
+            chat_mode=ChatMode.SIMPLE)
+
+
+index_builder = KipIndexBuilder(vdb_collection_name=QDRANT_COLLECTION_NAME,
+                                embed_model=embedding_model,
+                                is_load_from_vector_store=IS_LOAD_FROM_VECTOR_STORE)
+
+kip_chatbot = KipToolChatbot(model_name=DEFAULT_MODEL, index_builder=index_builder)
+kip_chatbot_context = KipContextChatbot(model_name=DEFAULT_MODEL, index_builder=index_builder)
+kip_chatbot_simple = KipSimpleChatbot(model_name=DEFAULT_MODEL, index_builder=index_builder)
+
+
+def vote(data: gr.LikeData):
+    if data.liked:
+        gr.Info("You up-voted this response: " + data.value)
+    else:
+        gr.Info("You down-voted this response: " + data.value)
+
+
+chatbot = gr.Chatbot()
+
+with gr.Blocks() as demo:
+    gr.Markdown("# Kipwise LLM demo")
+
+    with gr.Tab("Using relevant context sent to system prompt"):
+        context_interface = gr.ChatInterface(
+            kip_chatbot_context.stream_chat,
+            examples=kip_chatbot.CHAT_EXAMPLES,
+        )
+        chatbot.like(vote, None, None)
+
+    with gr.Tab("Using function calling as tool to retrieve"):
+        function_call_interface = gr.ChatInterface(
+            kip_chatbot.stream_chat,
+            examples=kip_chatbot.CHAT_EXAMPLES,
+        )
+        chatbot.like(vote, None, None)
+
+    with gr.Tab("Vanilla ChatGPT without modification"):
+        vanilla_interface = gr.ChatInterface(
+            kip_chatbot_simple.stream_chat,
+            examples=kip_chatbot.CHAT_EXAMPLES)
+
+demo.queue(False).launch(server_name='0.0.0.0', share=False)
logs/chatbot.log
ADDED
@@ -0,0 +1,53 @@
+2024-02-08 01:44:46,292 - INFO - HTTP Request: GET http://localhost:7860/startup-events "HTTP/1.1 200 OK"
+2024-02-08 01:44:46,461 - INFO - HTTP Request: HEAD http://localhost:7860/ "HTTP/1.1 200 OK"
+2024-02-08 01:44:46,843 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 01:44:46,894 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 01:44:46,898 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 01:44:46,942 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 01:44:47,055 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 01:44:47,105 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 01:44:47,141 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 01:44:47,418 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 01:44:47,706 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 01:44:48,078 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 01:44:48,107 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 01:44:48,204 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 01:44:48,611 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 01:44:52,228 - INFO - HTTP Request: POST https://api.gradio.app/gradio-launched-telemetry/ "HTTP/1.1 200 OK"
+2024-02-08 03:59:28,785 - INFO - HTTP Request: GET http://localhost:7860/startup-events "HTTP/1.1 200 OK"
+2024-02-08 03:59:28,794 - INFO - HTTP Request: HEAD http://localhost:7860/ "HTTP/1.1 200 OK"
+2024-02-08 03:59:29,587 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 03:59:29,675 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 03:59:29,714 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 03:59:29,722 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 03:59:29,726 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 03:59:29,740 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 03:59:29,756 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 03:59:29,757 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 03:59:29,773 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 03:59:30,874 - INFO - HTTP Request: POST https://api.gradio.app/gradio-launched-telemetry/ "HTTP/1.1 200 OK"
+2024-02-08 03:59:30,887 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 03:59:30,889 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 03:59:30,951 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 03:59:31,036 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 03:59:39,666 - INFO - []
+2024-02-08 03:59:39,667 - INFO - [ChatMessage(role=<MessageRole.SYSTEM: 'system'>, content='', additional_kwargs={})]
+2024-02-08 03:59:40,449 - INFO - HTTP Request: POST https://api.openai.com/v1/embeddings "HTTP/1.1 200 OK"
+2024-02-08 03:59:41,677 - INFO - HTTP Request: POST https://418ab8e7-b634-4abf-bed8-63d5b3567b18.us-east4-0.gcp.cloud.qdrant.io:6333/collections/T1G0CE4N7/points/search "HTTP/1.1 200 OK"
+2024-02-08 03:59:42,513 - INFO - HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
+2024-02-08 04:43:57,450 - INFO - HTTP Request: GET http://localhost:7860/startup-events "HTTP/1.1 200 OK"
+2024-02-08 04:43:57,456 - INFO - HTTP Request: HEAD http://localhost:7860/ "HTTP/1.1 200 OK"
+2024-02-08 04:43:58,286 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 04:43:58,312 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 04:43:58,351 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 04:43:58,414 - INFO - HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
+2024-02-08 04:43:58,529 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 04:43:58,531 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 04:43:58,536 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 04:43:58,545 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 04:43:58,546 - INFO - HTTP Request: GET https://checkip.amazonaws.com/ "HTTP/1.1 200 "
+2024-02-08 04:43:59,820 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 04:43:59,820 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 04:43:59,821 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 04:43:59,868 - INFO - HTTP Request: POST https://api.gradio.app/gradio-initiated-analytics/ "HTTP/1.1 200 OK"
+2024-02-08 04:43:59,869 - INFO - HTTP Request: POST https://api.gradio.app/gradio-launched-telemetry/ "HTTP/1.1 200 OK"
qdrant.py
ADDED
@@ -0,0 +1,8 @@
+from qdrant_client import QdrantClient
+
+from environments import QDRANT_URL, QDRANT_API_KEY
+
+client = QdrantClient(
+    url=QDRANT_URL,
+    api_key=QDRANT_API_KEY,
+)
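Note: a quick way to confirm the client and credentials work before the index builder depends on them is to list the collections on the cluster. This is a hypothetical check, not part of this commit, reusing the `client` and `QDRANT_COLLECTION_NAME` defined in this repo:

    # Hypothetical connectivity check (not in this commit).
    from qdrant import client
    from environments import QDRANT_COLLECTION_NAME

    names = [c.name for c in client.get_collections().collections]
    print(QDRANT_COLLECTION_NAME in names)  # True if the target collection already exists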
requirements.txt
ADDED
@@ -0,0 +1,9 @@
+gradio==4.15.0
+llama_index==0.9.34
+nltk==3.8.1
+openai==1.8.0
+phoenix==0.9.1
+python-dotenv==1.0.0
+qdrant_client==1.7.1
+unstructured==0.12.0
+arize-phoenix==2.5.0
service_provider_config.py
ADDED
@@ -0,0 +1,10 @@
+from llama_index import OpenAIEmbedding
+from llama_index.llms import OpenAI
+
+from constants import ChatbotVersion
+
+
+def get_service_provider_config(model_name: str = ChatbotVersion.CHATGPT_35.value):
+    llm = OpenAI(model=model_name)
+    embed_model = OpenAIEmbedding()
+    return llm, embed_model
venv-kip-llm/bin/Activate.ps1
ADDED
@@ -0,0 +1,247 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user. You can do this by issuing the following PowerShell
+command:
+
+PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+For more information on Execution Policies:
+https://go.microsoft.com/fwlink/?LinkID=135170
+
+#>
+Param(
+    [Parameter(Mandatory = $false)]
+    [String]
+    $VenvDir,
+    [Parameter(Mandatory = $false)]
+    [String]
+    $Prompt
+)
+
+<# Function declarations --------------------------------------------------- #>
+
+<#
+.Synopsis
+Remove all shell session elements added by the Activate script, including the
+addition of the virtual environment's Python executable from the beginning of
+the PATH variable.
+
+.Parameter NonDestructive
+If present, do not remove this function from the global namespace for the
+session.
+
+#>
+function global:deactivate ([switch]$NonDestructive) {
+    # Revert to original values
+
+    # The prior prompt:
+    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
+        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
+        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
+    }
+
+    # The prior PYTHONHOME:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
+        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
+    }
+
+    # The prior PATH:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
+        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
+    }
+
+    # Just remove the VIRTUAL_ENV altogether:
+    if (Test-Path -Path Env:VIRTUAL_ENV) {
+        Remove-Item -Path env:VIRTUAL_ENV
+    }
+
+    # Just remove VIRTUAL_ENV_PROMPT altogether.
+    if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
+        Remove-Item -Path env:VIRTUAL_ENV_PROMPT
+    }
+
+    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
+    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
+        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
+    }
+
+    # Leave deactivate function in the global namespace if requested:
+    if (-not $NonDestructive) {
+        Remove-Item -Path function:deactivate
+    }
+}
+
+<#
+.Description
+Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
+given folder, and returns them in a map.
+
+For each line in the pyvenv.cfg file, if that line can be parsed into exactly
+two strings separated by `=` (with any amount of whitespace surrounding the =)
+then it is considered a `key = value` line. The left hand string is the key,
+the right hand is the value.
+
+If the value starts with a `'` or a `"` then the first and last character is
+stripped from the value before being captured.
+
+.Parameter ConfigDir
+Path to the directory that contains the `pyvenv.cfg` file.
+#>
+function Get-PyVenvConfig(
+    [String]
+    $ConfigDir
+) {
+    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
+
+    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
+
+    # An empty map will be returned if no config file is found.
+    $pyvenvConfig = @{ }
+
+    if ($pyvenvConfigPath) {
+
+        Write-Verbose "File exists, parse `key = value` lines"
+        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
+
+        $pyvenvConfigContent | ForEach-Object {
+            $keyval = $PSItem -split "\s*=\s*", 2
+            if ($keyval[0] -and $keyval[1]) {
+                $val = $keyval[1]
+
+                # Remove extraneous quotations around a string value.
+                if ("'""".Contains($val.Substring(0, 1))) {
+                    $val = $val.Substring(1, $val.Length - 2)
+                }
+
+                $pyvenvConfig[$keyval[0]] = $val
+                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
+            }
+        }
+    }
+    return $pyvenvConfig
+}
+
+
+<# Begin Activate script --------------------------------------------------- #>
+
+# Determine the containing directory of this script
+$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
+$VenvExecDir = Get-Item -Path $VenvExecPath
+
+Write-Verbose "Activation script is located in path: '$VenvExecPath'"
+Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
+Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
+
+# Set values required in priority: CmdLine, ConfigFile, Default
+# First, get the location of the virtual environment, it might not be
+# VenvExecDir if specified on the command line.
+if ($VenvDir) {
+    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
+}
+else {
+    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
+    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
+    Write-Verbose "VenvDir=$VenvDir"
+}
+
+# Next, read the `pyvenv.cfg` file to determine any required value such
+# as `prompt`.
+$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
+
+# Next, set the prompt from the command line, or the config file, or
+# just use the name of the virtual environment folder.
+if ($Prompt) {
+    Write-Verbose "Prompt specified as argument, using '$Prompt'"
+}
+else {
+    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
+    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
+        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
+        $Prompt = $pyvenvCfg['prompt'];
+    }
+    else {
+        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
+        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
+        $Prompt = Split-Path -Path $venvDir -Leaf
+    }
+}
+
+Write-Verbose "Prompt = '$Prompt'"
+Write-Verbose "VenvDir='$VenvDir'"
+
+# Deactivate any currently active virtual environment, but leave the
+# deactivate function in place.
+deactivate -nondestructive
+
+# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
+# that there is an activated venv.
+$env:VIRTUAL_ENV = $VenvDir
+
+if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
+
+    Write-Verbose "Setting prompt to '$Prompt'"
+
+    # Set the prompt to include the env name
+    # Make sure _OLD_VIRTUAL_PROMPT is global
+    function global:_OLD_VIRTUAL_PROMPT { "" }
+    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
+    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
+
+    function global:prompt {
+        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
+        _OLD_VIRTUAL_PROMPT
+    }
+    $env:VIRTUAL_ENV_PROMPT = $Prompt
+}
+
+# Clear PYTHONHOME
+if (Test-Path -Path Env:PYTHONHOME) {
+    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
+    Remove-Item -Path Env:PYTHONHOME
+}
+
+# Add the venv to the PATH
+Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
+$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
venv-kip-llm/bin/activate
ADDED
@@ -0,0 +1,69 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+        PATH="${_OLD_VIRTUAL_PATH:-}"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands. Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+        hash -r 2> /dev/null
+    fi
+
+    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+        PS1="${_OLD_VIRTUAL_PS1:-}"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    unset VIRTUAL_ENV_PROMPT
+    if [ ! "${1:-}" = "nondestructive" ] ; then
+        # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "${PYTHONHOME:-}" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+    unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+    _OLD_VIRTUAL_PS1="${PS1:-}"
+    PS1="(venv-kip-llm) ${PS1:-}"
+    export PS1
+    VIRTUAL_ENV_PROMPT="(venv-kip-llm) "
+    export VIRTUAL_ENV_PROMPT
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+    hash -r 2> /dev/null
+fi
venv-kip-llm/bin/activate.csh
ADDED
@@ -0,0 +1,26 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+    set prompt = "(venv-kip-llm) $prompt"
+    setenv VIRTUAL_ENV_PROMPT "(venv-kip-llm) "
+endif
+
+alias pydoc python -m pydoc
+
+rehash
venv-kip-llm/bin/activate.fish
ADDED
@@ -0,0 +1,69 @@
+# This file must be used with "source <venv>/bin/activate.fish" *from fish*
+# (https://fishshell.com/); you cannot run it directly.
+
+function deactivate -d "Exit virtual environment and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH"
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+        # prevents error when using nested fish instances (Issue #93858)
+        if functions -q _old_fish_prompt
+            functions -e fish_prompt
+            functions -c _old_fish_prompt fish_prompt
+            functions -e _old_fish_prompt
+        end
+    end
+
+    set -e VIRTUAL_ENV
+    set -e VIRTUAL_ENV_PROMPT
+    if test "$argv[1]" != "nondestructive"
+        # Self-destruct!
+        functions -e deactivate
+    end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# Unset PYTHONHOME if set.
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+
+    # Save the current fish_prompt function as the function _old_fish_prompt.
+    functions -c fish_prompt _old_fish_prompt
+
+    # With the original prompt function renamed, we can override with our own.
+    function fish_prompt
+        # Save the return status of the last command.
+        set -l old_status $status
+
+        # Output the venv prompt; color taken from the blue of the Python logo.
+        printf "%s%s%s" (set_color 4B8BBE) "(venv-kip-llm) " (set_color normal)
+
+        # Restore the return status of the previous command.
+        echo "exit $old_status" | .
+        # Output the original/"old" prompt.
+        _old_fish_prompt
+    end
+
+    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+    set -gx VIRTUAL_ENV_PROMPT "(venv-kip-llm) "
+end
venv-kip-llm/bin/chardetect
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from chardet.cli.chardetect import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/cygdb
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from Cython.Debugger.Cygdb import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/cython
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from Cython.Compiler.Main import setuptools_main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(setuptools_main())
venv-kip-llm/bin/cythonize
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from Cython.Build.Cythonize import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/distro
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from distro.distro import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/dotenv
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from dotenv.__main__ import cli
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(cli())
venv-kip-llm/bin/f2py
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from numpy.f2py.f2py2e import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/filetype
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from filetype.__main__ import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/fonttools
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from fontTools.__main__ import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/gradio
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from gradio.cli import cli
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(cli())
venv-kip-llm/bin/httpx
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from httpx import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/huggingface-cli
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from huggingface_hub.commands.huggingface_cli import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/jsonschema
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from jsonschema.cli import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/llamaindex-cli
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from llama_index.command_line.command_line import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/markdown-it
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from markdown_it.cli.parse import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/nltk
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from nltk.cli import cli
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(cli())
venv-kip-llm/bin/normalizer
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from charset_normalizer.cli import cli_detect
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(cli_detect())
venv-kip-llm/bin/numba
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: UTF-8 -*-
+from __future__ import print_function, division, absolute_import
+
+from numba.misc.numba_entry import main
+
+if __name__ == "__main__":
+    main()
venv-kip-llm/bin/openai
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from openai.cli import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/pip
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/pip3
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/pip3.11
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/pyftmerge
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from fontTools.merge import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/pyftsubset
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from fontTools.subset import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/pygmentize
ADDED
@@ -0,0 +1,8 @@
+#!/Users/cowcow02/Repo/kipwise/kip-llm/gradio-app/venv-kip-llm/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pygments.cmdline import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
venv-kip-llm/bin/python
ADDED
Binary file (52.6 kB)

venv-kip-llm/bin/python3
ADDED
Binary file (52.6 kB)

venv-kip-llm/bin/python3.11
ADDED
Binary file (52.6 kB)