import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Set the random seed for reproducibility
torch.random.manual_seed(0)

# Load the model; no 'device_map' is specified, so weights stay on the default device
model = AutoModelForCausalLM.from_pretrained(
    "AdnanRiaz107/CodePhi-3-mini-0.1Klora",
    torch_dtype="auto",           # let transformers pick a suitable dtype
    trust_remote_code=True,
    attn_implementation="eager",  # use eager attention instead of flash-attention kernels
    load_in_4bit=True,            # 4-bit quantization; requires the bitsandbytes package
)
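# Note: passing load_in_4bit directly to from_pretrained is deprecated in newer
# transformers releases, and 4-bit loading needs bitsandbytes and a CUDA GPU.
# A minimal alternative sketch (assumption, not part of the original app):
#
#     from transformers import BitsAndBytesConfig
#     model = AutoModelForCausalLM.from_pretrained(
#         "AdnanRiaz107/CodePhi-3-mini-0.1Klora",
#         torch_dtype="auto",
#         trust_remote_code=True,
#         attn_implementation="eager",
#         quantization_config=BitsAndBytesConfig(load_in_4bit=True),
#     )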

# Load the tokenizer
tokenizer = AutoTokenizer.from_pretrained("AdnanRiaz107/CodePhi-3-mini-0.1Klora")

# Create a text generation pipeline
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
)

# Generation arguments: greedy decoding, up to 500 new tokens
generation_args = {
    "max_new_tokens": 500,
    "return_full_text": False,  # return only the newly generated text
    "do_sample": False,         # greedy decoding; temperature is ignored when sampling is off
}
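# Optional sketch (assumption, not part of the original app): for more varied
# completions, enable sampling instead of greedy decoding, e.g.:
#
#     generation_args = {
#         "max_new_tokens": 500,
#         "return_full_text": False,
#         "do_sample": True,
#         "temperature": 0.7,
#         "top_p": 0.9,
#     }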

# Gradio interface function
def generate_response(input_text):
    # Wrap the input as a chat message; the pipeline applies the model's chat template
    messages = [{"role": "user", "content": input_text}]

    # Run generation and return the generated text
    output = pipe(messages, **generation_args)
    return output[0]["generated_text"]

# Create Gradio demo interface
demo = gr.Interface(
    fn=generate_response,
    inputs=gr.Textbox(
        lines=2,
        placeholder="Enter your question here...",
        label="Your Input",
    ),
    outputs=gr.Textbox(
        label="Model Response",
        placeholder="Response will be displayed here...",
    ),
    title="AI Assistant for Python Code Generation",
    description="Describe the Python code you need and the assistant will generate it: functions, scripts, bug fixes, or explanations of existing code.",
    examples=[
        ["Write a Python function that checks whether a string is a palindrome."],
        ["How do I read a CSV file with pandas and print the first five rows?"],
        ["Implement binary search over a sorted list in Python."],
    ],
    theme="default"  # other built-in Gradio themes include "soft", "monochrome", "glass", and "base"
)

if __name__ == "__main__":
    demo.launch()
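    # Note (assumption, not part of the original app): on a Hugging Face Space,
    # launch() needs no arguments; when running locally you can expose the app on
    # your network with, for example:
    #     demo.launch(server_name="0.0.0.0", server_port=7860)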