# Load the model through a Transformers text-generation pipeline
from transformers import pipeline
import gradio as gr
import torch

# Check if CUDA is available, otherwise use CPU
device = "cuda" if torch.cuda.is_available() else "cpu"

pipe = pipeline("text-generation", model="microsoft/BioGPT-Large", device=device)
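# The "text-generation" pipeline returns a list of dicts; each dict's
# 'generated_text' key holds the prompt plus the model's completion,
# which question() below indexes with [0]['generated_text'].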

def question(message, history):
    # Generate the completion and extract the generated text
    response = pipe(message, max_length=200)[0]['generated_text']
    return response
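
# A minimal sketch (an assumption, not part of the original app) showing how
# the chat history could be folded into the prompt. BioGPT-Large is a plain
# completion model, so multi-turn context is just string concatenation. The
# helper name question_with_history is hypothetical, and the [user, assistant]
# pair format matches Gradio's default tuple-style history.
def question_with_history(message, history):
    # Flatten prior turns into one context string, skipping empty entries
    context = " ".join(part for turn in history for part in turn if part)
    prompt = f"{context} {message}".strip()
    return pipe(prompt, max_length=200)[0]['generated_text']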

# Description in Markdown
description = """
# Summary
This chat pipes directly into Microsoft's BioGPT-Large LLM, which can produce some strange output. The model card is here: [Microsoft BioGPT Large](https://huggingface.co/microsoft/BioGPT-Large). To get useful results, treat it as a completion model: give it the beginning of a statement and let the network finish it. See the examples below for ideas.

### Examples
* HIV is
* Foot Fungus causes
* Symptoms of liver failure are

### Good Luck! 🍀
Coded 🧾 by [Matthew Rogers](https://matthewrogers.org) | [RamboRogers](https://github.com/ramboRogers)
"""



program = gr.ChatInterface(question, description=description, title="Microsoft BioGPT Large Chat")

if __name__ == "__main__":
    program.launch()
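
# Usage note: `python app.py` serves the UI on Gradio's default local port
# (7860); passing share=True to launch() creates a temporary public link.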