awacke1 committed
Commit
47c116b
1 Parent(s): 444d5a2

Update app.py

Files changed (1):
  1. app.py +20 -7
app.py CHANGED
@@ -11,13 +11,26 @@ headers = {
 }
 
 
-#import { HfInference } from '@huggingface/inference'
-#hf = new HfInference(API_URL, API_KEY)
-#prompt = 'Write instructions to teach anyone to write a discharge plan. List the entities, features and relationships to CCDA and FHIR objects in boldface.'
-#stream = hf.textGenerationStream({ inputs: prompt })
-#for await (const r of stream) {
-#  st.markdown(r.token.text)
-#}
+from huggingface_hub import InferenceClient
+endpoint_url = API_URL
+hf_token = API_KEY
+client = InferenceClient(endpoint_url, token=hf_token)
+gen_kwargs = dict(
+    max_new_tokens=512,
+    top_k=30,
+    top_p=0.9,
+    temperature=0.2,
+    repetition_penalty=1.02,
+    stop_sequences=["\nUser:", "<|endoftext|>", "</s>"],
+)
+prompt = f"Write instructions to teach anyone to write a discharge plan. List the entities, features and relationships to CCDA and FHIR objects in boldface."
+stream = client.text_generation(prompt, stream=True, details=True, **gen_kwargs)
+for r in stream:
+    if r.token.special:
+        continue
+    if r.token.text in gen_kwargs["stop_sequences"]:
+        break
+    st.markdown(r.token.text, end = "")
 
 
 def query(payload):
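
Note on the new streaming loop: Streamlit's st.markdown does not accept an end keyword (that argument belongs to Python's print), so the last added line raises a TypeError on the first token. Below is a minimal sketch of one way to render the stream instead, assuming the same client, prompt, and gen_kwargs defined in the commit above; the placeholder name output_box is illustrative and not part of the commit.

import streamlit as st

output_box = st.empty()   # single placeholder that is rewritten as tokens arrive
generated = ""

stream = client.text_generation(prompt, stream=True, details=True, **gen_kwargs)
for r in stream:
    if r.token.special:
        continue                                   # skip special/control tokens
    if r.token.text in gen_kwargs["stop_sequences"]:
        break                                      # stop once a stop sequence appears
    generated += r.token.text                      # accumulate the running text
    output_box.markdown(generated)                 # re-render the full text so far

Rewriting one st.empty() placeholder keeps the output as a single growing block of markdown, whereas calling st.markdown per token would emit each token as its own element.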