# Streamlit test bench for a Llama 7B model hosted on a Hugging Face Inference Endpoint.
import os

import requests
import streamlit as st
from huggingface_hub import InferenceClient

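# Endpoint URL for the hosted model; the access token is read from the API_KEY environment variable.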
API_URL = 'https://qe55p8afio98s0u3.us-east-1.aws.endpoints.huggingface.cloud'
API_KEY = os.getenv('API_KEY')

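# Request headers for the raw REST calls made by query() below.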
headers = {
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json"
}


# Streaming client for token-by-token generation against the same endpoint.
endpoint_url = API_URL
hf_token = API_KEY
client = InferenceClient(endpoint_url, token=hf_token)

# Generation settings for the streaming demo below.
gen_kwargs = dict(
    max_new_tokens=512,
    top_k=30,
    top_p=0.9,
    temperature=0.2,
    repetition_penalty=1.02,
    stop_sequences=["\nUser:", "<|endoftext|>", "</s>"],
)
prompt = f"Write instructions to teach anyone to write a discharge plan. List the entities, features and relationships to CCDA and FHIR objects in boldface."
stream = client.text_generation(prompt, stream=True, details=True, **gen_kwargs)
for r in stream:
    if r.token.special:
        continue
    if r.token.text in gen_kwargs["stop_sequences"]:
        break
    st.markdown(r.token.text, end = "")


def query(payload):
    """POST the payload to the Inference Endpoint and return the parsed JSON response."""
    response = requests.post(API_URL, headers=headers, json=payload)
    result = response.json()
    st.json(result)  # surface the raw endpoint response in the app for inspection
    return result

def get_output(prompt):
    return query({"inputs": prompt})

def main():
    st.title("Medical Llama Test Bench with Inference Endpoints Llama 7B")
    example_input = st.text_input("Enter your example text:")

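    # Two prompt variations send the user's text to the same endpoint via get_output().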
    if st.button("Summarize with Variation 1"):
        prompt = f"Write instructions to teach anyone to write a discharge plan. List the entities, features and relationships to CCDA and FHIR objects in boldface. {example_input}"
        output = get_output(prompt)
        st.markdown(f"**Output:** {output}")

    if st.button("Summarize with Variation 2"):
        prompt = f"Provide a summary of the medical transcription. Highlight the important entities, features, and relationships to CCDA and FHIR objects. {example_input}"
        output = get_output(prompt)
        st.markdown(f"**Output:** {output}")

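# To run locally: streamlit run <path-to-this-script>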
if __name__ == "__main__":
    main()