# cp .env.example .env
# Edit your .env file with your own values
# Don't commit your .env file to git/push to GitHub!
# Don't modify/delete .env.example unless adding extensions to the project
# which require new variables to be added to the .env file
# API CONFIG
# OPENAI_API_MODEL can be used instead
# Special values:
# human - use human as intermediary with custom LLMs
# llama - use llama.cpp with Llama, Alpaca, Vicuna, GPT4All, etc.
LLM_MODEL=gpt-3.5-turbo # alternatively, gpt-4, text-davinci-003, etc.
LLAMA_MODEL_PATH= # ex. models/llama-13B/ggml-model.bin
#LLAMA_THREADS_NUM=8 # Set the number of threads for llama (optional)
OPENAI_API_KEY=
OPENAI_TEMPERATURE=0.0
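# Example (sketch; path and thread count are placeholders): running a local llama.cpp model instead of OpenAI
# LLM_MODEL=llama
# LLAMA_MODEL_PATH=models/llama-13B/ggml-model.bin
# LLAMA_THREADS_NUM=8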
# STORE CONFIG
# TABLE_NAME can be used instead
RESULTS_STORE_NAME=baby-agi-test-table
# Weaviate config
# Uncomment and fill these to switch from local ChromaDB to Weaviate
# WEAVIATE_USE_EMBEDDED=true
# WEAVIATE_URL=
# WEAVIATE_API_KEY=
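# Example (illustrative values, not real credentials; assumes embedded mode is turned off when pointing at a hosted cluster):
# WEAVIATE_USE_EMBEDDED=false
# WEAVIATE_URL=https://your-cluster.weaviate.network
# WEAVIATE_API_KEY=your-weaviate-api-key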
# Pinecone config
# Uncomment and fill these to switch from local ChromaDB to Pinecone
# PINECONE_API_KEY=
# PINECONE_ENVIRONMENT=
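# Example (illustrative values only): pointing the results store at Pinecone
# PINECONE_API_KEY=your-pinecone-api-key
# PINECONE_ENVIRONMENT=us-east1-gcp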
# COOPERATIVE MODE CONFIG
# BABY_NAME can be used instead
INSTANCE_NAME=BabyAGI
COOPERATIVE_MODE=none # alternatively, local
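# Example (assumes "local" lets instances on this machine cooperate; give each instance its own name):
# INSTANCE_NAME=BabyAGI-worker-2
# COOPERATIVE_MODE=local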
# RUN CONFIG
OBJECTIVE=Solve world hunger
# For backwards compatibility
# FIRST_TASK can be used instead of INITIAL_TASK
INITIAL_TASK=Develop a task list
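# Example (hypothetical objective): a narrower run than the default
# OBJECTIVE=Research and summarize three open-source vector databases
# INITIAL_TASK=Develop a task list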
# Extensions
# List additional extension .env files to load (except .env.example!)
DOTENV_EXTENSIONS=
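# Example (hypothetical file names; the space-separated format is an assumption):
# DOTENV_EXTENSIONS=.env.pinecone .env.cli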
# Set to true to enable command line args support
ENABLE_COMMAND_LINE_ARGS=false
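# Example: flip to true if you prefer passing options on the command line
# ENABLE_COMMAND_LINE_ARGS=true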