acecalisto3 committed on
Commit
f6e7cfb
1 Parent(s): d3ab0a0

Update app.py

Files changed (1)
  1. app.py +25 -12
app.py CHANGED
@@ -1,20 +1,33 @@
 import os
+from huggingface_hub import InferenceClient
+import gradio as gr
+import random
+from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 import subprocess
-
+import threading
+import time
+import json
 import streamlit as st
-from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+
+# Initialize the session state
+if 'current_state' not in st.session_state:
+    st.session_state.current_state = None
+# Initialize the InferenceClient for Mixtral-8x7B-Instruct-v0.1
+client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
+
+# Load the model and tokenizer from a different repository
+model_name = "bigscience/bloom-1b7"
+model = AutoModelForCausalLM.from_pretrained(model_name)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
 
 from agent.prompts import (
-    ACTION_PROMPT,
-    ADD_PROMPT,
-    COMPRESS_HISTORY_PROMPT,
-    LOG_PROMPT,
-    LOG_RESPONSE,
-    MODIFY_PROMPT,
-    PREFIX,
-    READ_PROMPT,
-    TASK_PROMPT,
-    UNDERSTAND_TEST_RESULTS_PROMPT,
+    AI_SYSTEM_PROMPT,
+    CODE_REVIEW_ASSISTANT,
+    CONTENT_WRITER_EDITOR,
+    PYTHON_CODE_DEV,
+    WEB_DEV,
+    QUESTION_GENERATOR,
+    HUGGINGFACE_FILE_DEV,
 )
 from agent.utils import parse_action, parse_file_content, read_python_module_structure
 
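For reference, a minimal usage sketch of the pieces this commit introduces (not part of the committed app.py): it reuses the same model IDs, but the prompts and generation parameters below are placeholders chosen for illustration.

# Illustrative only: exercises the InferenceClient and the locally loaded
# bloom-1b7 model added in this commit. Prompts and parameters are assumptions.
from huggingface_hub import InferenceClient
from transformers import AutoModelForCausalLM, AutoTokenizer

# Remote inference against the hosted Mixtral endpoint
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
remote_reply = client.text_generation(
    "List three risks of auto-generated code.",  # placeholder prompt
    max_new_tokens=64,
)
print(remote_reply)

# Local generation with the bloom-1b7 weights loaded in app.py
model_name = "bigscience/bloom-1b7"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

inputs = tokenizer("def parse_action(line):", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))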