phyloforfun committed
Commit • d174ce3
Parent(s): f56fafe

Major update: support for 15 LLMs, World Flora Online taxonomy validation, geolocation, 2 OCR methods, significant UI changes, stability improvements, and consistent JSON parsing.
Files changed:
- api_status.yaml +11 -0
- app.py +1 -1
- vouchervision/API_validation.py +4 -3
- vouchervision/LLM_GooglePalm2.py +9 -4
- vouchervision/utils_VoucherVision.py +13 -10
api_status.yaml (ADDED)
@@ -0,0 +1,11 @@
+date: January 26, 2024
+missing_keys: []
+present_keys:
+- Google OCR (Valid)
+- OpenAI (Valid)
+- Azure OpenAI (Valid)
+- Palm2 (Valid)
+- Gemini (Valid)
+- Mistral (Valid)
+- HERE Geocode (Valid)
+- OpenCage Geocode (Valid)
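The new api_status.yaml is a small status report written after the key-validation pass. A minimal sketch of how a file with this layout could be loaded and displayed (the load_api_status helper is illustrative, not part of the commit; assumes PyYAML is installed):

import yaml

def load_api_status(path="api_status.yaml"):
    # Read the validation report written at startup (layout as in the diff above).
    with open(path, "r") as f:
        return yaml.safe_load(f)

if __name__ == "__main__":
    status = load_api_status()
    print(f"Keys checked on {status['date']}")
    for key in status.get("present_keys", []):
        print(f"  present: {key}")
    for key in status.get("missing_keys", []):
        print(f"  MISSING: {key}")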
app.py (CHANGED)
@@ -2299,7 +2299,7 @@ st.set_page_config(layout="wide", page_icon='img/icon.ico', page_title='VoucherV
 
 # Parse the 'is_hf' argument and set it in session state
 if 'is_hf' not in st.session_state:
-    st.session_state['is_hf'] =
+    st.session_state['is_hf'] = False
 
 
 #################################################################################################################################################
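The new line hardcodes the default to False. A sketch of the pattern the in-code comment describes, parsing an --is_hf command-line flag into Streamlit session state (the argparse wiring and flag name are assumptions, not the committed code):

import argparse
import streamlit as st

if 'is_hf' not in st.session_state:
    # Parse our own '--is_hf' flag and ignore Streamlit's CLI arguments.
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--is_hf', action='store_true')
    args, _unknown = parser.parse_known_args()
    st.session_state['is_hf'] = args.is_hf  # False unless the flag is passed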
vouchervision/API_validation.py (CHANGED)
@@ -1,5 +1,4 @@
 import os, io, openai, vertexai, json, tempfile
-import google.generativeai as genai
 from mistralai.client import MistralClient
 from mistralai.models.chat_completion import ChatMessage
 from langchain.schema import HumanMessage

@@ -242,8 +241,10 @@ class APIvalidation:
         palm_api_key = os.getenv('PALM_API_KEY')
         google_project_id = os.getenv('GOOGLE_PROJECT_ID')
         google_location = os.getenv('GOOGLE_LOCATION')
-
-        vertexai.init(project=google_project_id, location=google_location)#, credentials=credentials)
+        os.environ['GOOGLE_API_KEY'] = os.getenv('PALM_API_KEY')
+        vertexai.init(project=os.getenv('GOOGLE_PROJECT_ID'), location=os.getenv('GOOGLE_LOCATION'))
+        # genai.configure(api_key=palm_api_key)
+        # vertexai.init(project=google_project_id, location=google_location)#, credentials=credentials)
 
         try:
             model = TextGenerationModel.from_pretrained("text-bison@001")
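The change routes the PaLM key into GOOGLE_API_KEY and initializes Vertex AI from environment variables before probing the model. A condensed sketch of that validation flow (the function name and probe prompt are illustrative, not the committed code):

import os
import vertexai
from vertexai.language_models import TextGenerationModel

def check_palm2():
    # Re-use the PaLM key as Google's generic API key, as the diff does.
    os.environ['GOOGLE_API_KEY'] = os.getenv('PALM_API_KEY', '')
    vertexai.init(project=os.getenv('GOOGLE_PROJECT_ID'),
                  location=os.getenv('GOOGLE_LOCATION'))
    try:
        model = TextGenerationModel.from_pretrained("text-bison@001")
        model.predict("Hello", max_output_tokens=8)  # tiny probe call
        return "Palm2 (Valid)"
    except Exception as err:
        return f"Palm2 (Invalid): {err}"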
vouchervision/LLM_GooglePalm2.py (CHANGED)
@@ -2,6 +2,8 @@ import os, time
 import vertexai
 from vertexai.language_models import TextGenerationModel
 from vertexai.generative_models._generative_models import HarmCategory, HarmBlockThreshold
+from vertexai.language_models import TextGenerationModel
+from vertexai.preview.generative_models import GenerativeModel
 from langchain.output_parsers import RetryWithErrorOutputParser
 from langchain.schema import HumanMessage
 from langchain.prompts import PromptTemplate

@@ -56,7 +58,8 @@ class GooglePalm2Handler:
         self.config = {
             "max_output_tokens": 1024,
             "temperature": self.starting_temp,
-            "top_p": 1
+            "top_p": 1.0,
+            "top_k": 40,
         }
         self.safety_settings = {
             HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,

@@ -81,9 +84,7 @@ class GooglePalm2Handler:
 
     def _build_model_chain_parser(self):
         # Instantiate the parser and the retry parser
-        self.llm_model = ChatGoogleGenerativeAI(model=self.model_name,
-                                                max_output_tokens=self.config.get('max_output_tokens'),
-                                                top_p=self.config.get('top_p'))
+        self.llm_model = ChatGoogleGenerativeAI(model=self.model_name)
 
         self.retry_parser = RetryWithErrorOutputParser.from_llm(
             parser=self.parser,

@@ -93,12 +94,16 @@ class GooglePalm2Handler:
         self.chain = self.prompt | self.call_google_palm2
 
     # Define a function to format the input for Google PaLM call
+    # https://cloud.google.com/vertex-ai/docs/generative-ai/migrate/migrate-palm-to-gemini?_ga=2.225326234.-1652490527.1705461451&_gac=1.186295771.1706291573.CjwKCAiAzc2tBhA6EiwArv-i6QCpx7xTP0yrBy9KKSwno3QXOWUe14mbp9RGZO0ShcbtFqyXii2PnRoCywgQAvD_BwE
     def call_google_palm2(self, prompt_text):
         model = TextGenerationModel.from_pretrained(self.model_name)
         response = model.predict(prompt_text.text,
                                  max_output_tokens=self.config.get('max_output_tokens'),
                                  temperature=self.config.get('temperature'),
                                  top_p=self.config.get('top_p'))
+        # model = GenerativeModel(self.model_name)
+
+        # response = model.generate_content(prompt_text.text, generation_config=self.config, safety_settings=self.safety_settings, stream=False)
         return response.text
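The commented-out lines and the linked migration guide point at moving this handler from the PaLM text-bison API to Gemini's GenerativeModel.generate_content. A minimal sketch of that call shape, reusing the handler's config and safety settings (treating 'gemini-pro' as an assumed model name; this is not the committed code):

from vertexai.preview.generative_models import GenerativeModel

def call_gemini(prompt_text, generation_config, safety_settings, model_name="gemini-pro"):
    # Gemini-style equivalent of call_google_palm2; assumes vertexai.init() has already run.
    model = GenerativeModel(model_name)
    response = model.generate_content(prompt_text,
                                      generation_config=generation_config,
                                      safety_settings=safety_settings,
                                      stream=False)
    return response.text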
vouchervision/utils_VoucherVision.py (CHANGED)
@@ -462,22 +462,21 @@ class VoucherVision():
         self.has_key_google_project_location = google_project_location is not None
 
         genai_api_key = os.getenv('PALM_API_KEY')
-
-
-
+        os.environ['GOOGLE_API_KEY'] = os.getenv('PALM_API_KEY')
+        vertexai.init(project=os.getenv('GOOGLE_PROJECT_ID'), location=os.getenv('GOOGLE_LOCATION'))
         # try:
         #     if genai_api_key:
         #         genai.configure(api_key=genai_api_key)
         # except:
         #     print(f"failed genai.configure")
 
-        try:
-            if google_project_id and google_location:
-                # # clien, credentials = self.init_google_client('gemini-pro', 'v1')
-                # # print(credentials)
-                vertexai.init(project=google_project_id, location=google_location)#, credentials=credentials)
-        except:
-            print(f"failed vertexai.init")
+        # try:
+        #     if google_project_id and google_location:
+        #         # # clien, credentials = self.init_google_client('gemini-pro', 'v1')
+        #         # # print(credentials)
+        #         vertexai.init(project=google_project_id, location=google_location)#, credentials=credentials)
+        # except:
+        #     print(f"failed vertexai.init")
         # if self.has_key_openai:
         #     openai.api_key = openai_api_key
 

@@ -548,7 +547,11 @@ class VoucherVision():
         os.environ['GOOGLE_PROJECT_ID'] = self.cfg_private['google_palm']['project_id'] # gemini
         os.environ['GOOGLE_LOCATION'] = self.cfg_private['google_palm']['location'] # gemini
         # genai.configure(api_key=self.cfg_private['google_palm']['google_palm_api'])
+        os.environ['GOOGLE_API_KEY'] = self.cfg_private['google_palm']['google_palm_api']
         vertexai.init(project=os.environ['GOOGLE_PROJECT_ID'], location=os.environ['GOOGLE_LOCATION'])
+
+
+
         # os.environ.pop("GOOGLE_APPLICATION_CREDENTIALS", None)
         # os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = self.cfg_private['google_cloud']['path_json_file'] ####
         # os.environ['GOOGLE_API_KEY'] = self.cfg_private['google_palm']['google_palm_api']
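Both hunks settle on the same wiring: push the Google credentials from cfg_private into environment variables, then call vertexai.init() once. A condensed sketch of that flow (the init_google_env helper is illustrative; it assumes only the cfg_private['google_palm'] keys already used in the diff):

import os
import vertexai

def init_google_env(cfg_private):
    # Export the keys the rest of the pipeline reads from the environment.
    os.environ['GOOGLE_API_KEY'] = cfg_private['google_palm']['google_palm_api']
    os.environ['GOOGLE_PROJECT_ID'] = cfg_private['google_palm']['project_id']
    os.environ['GOOGLE_LOCATION'] = cfg_private['google_palm']['location']
    # Initialize Vertex AI once, from the variables just set.
    vertexai.init(project=os.environ['GOOGLE_PROJECT_ID'],
                  location=os.environ['GOOGLE_LOCATION'])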