Spaces:
Runtime error
Runtime error
Update crew.py
Browse files
crew.py
CHANGED
@@ -53,7 +53,7 @@ class NewsletterGenCrew:
|
|
53 |
|
54 |
# llm = ChatGroq(model="llama3-70b-8192")
|
55 |
# https://console.groq.com/docs/rate-limits
|
56 |
-
|
57 |
|
58 |
# llm = ChatGoogleGenerativeAI(google_api_key=os.getenv("GOOGLE_API_KEY"))
|
59 |
|
@@ -61,7 +61,7 @@ class NewsletterGenCrew:
|
|
61 |
# supports many more optional parameters. Hover on your `ChatOllama(...)`
|
62 |
# class to view the latest available supported parameters
|
63 |
# llm = ChatOllama(model="llama3")
|
64 |
-        llm = ChatOllama(model="mistral:latest")
|
65 |
# check if ollama is running and which LLMs can then be used, run this in Anaconda cmd admin window:
|
66 |
# ollama list
|
67 |
# OUTPUT EXAMPLE:
|
|
|
53 |
|
54 |
# llm = ChatGroq(model="llama3-70b-8192")
|
55 |
# https://console.groq.com/docs/rate-limits
|
56 |
+        llm = ChatGroq(model="mixtral-8x7b-32768") # JB 13-06-2024 - geeft af en toe rate limit errors
|
57 |
|
58 |
# llm = ChatGoogleGenerativeAI(google_api_key=os.getenv("GOOGLE_API_KEY"))
|
59 |
|
|
|
61 |
# supports many more optional parameters. Hover on your `ChatOllama(...)`
|
62 |
# class to view the latest available supported parameters
|
63 |
# llm = ChatOllama(model="llama3")
|
64 |
+        # llm = ChatOllama(model="mistral:latest")
|
65 |
# check if ollama is running and which LLMs can then be used, run this in Anaconda cmd admin window:
|
66 |
# ollama list
|
67 |
# OUTPUT EXAMPLE:
|