# Gradio chat demo backed by the (legacy, pre-1.0) OpenAI ChatCompletion API.
import os

import gradio as gr
import requests, uuid, json, time, openai  # kept from original; some may be used by code outside this view

# --- credentials ------------------------------------------------------------
# SECURITY: the original file embedded a live OpenAI API key directly in
# source. That key must be treated as compromised and revoked immediately.
# Load the key from the environment instead of committing it to the repo.
openai.api_key = os.environ.get("OPENAI_API_KEY", "")
model = "gpt-3.5-turbo"


# --- code -------------------------------------------------------------------
def Mutilingual(prompt, history):
    """Chat handler for gr.ChatInterface.

    Sends only the latest user message (plus a fixed system prompt) to the
    model — the chat *history* argument supplied by ChatInterface is ignored,
    so each turn is stateless.

    Args:
        prompt: The user's latest message.
        history: Prior (user, assistant) turns from gr.ChatInterface; unused.

    Returns:
        The assistant's reply text.
    """
    DEFAULT_SYSTEM_PROMPT = "You are Zebra GPT, an AI assistant developed by Veda. Your purpose is to provide intelligent and helpful assistance to users. Analyze user queries comprehensively and respond with accurate and concise answers. Focus on understanding the user's needs and offer solutions in a clear and informative manner. If additional information is required, ask polite and clarifying questions. Your goal is to assist users effectively, demonstrating the capabilities of Veda's advanced AI"
    # NOTE(review): openai.ChatCompletion is the legacy <1.0 client API;
    # migrating to openai>=1.0 would require `openai.OpenAI().chat.completions`.
    response = openai.ChatCompletion.create(
        model=model,
        messages=[
            {"role": "system", "content": DEFAULT_SYSTEM_PROMPT},
            {"role": "user", "content": prompt},
        ],
    )
    return response.choices[0].message.content


css = """
#mkd {
    height: 500px;
    overflow: auto;
    border: 1px solid #ccc;
}
"""

with gr.Blocks(css=css) as demo:
    # NOTE(review): the original HTML markup appears to have been stripped
    # during extraction (only bare text survived); reconstructed as simple
    # headings. Also fixed the user-facing typo "Multilinugual".
    gr.HTML("<h1>Open AI Multilingual</h1>")
    gr.HTML("<h3>Etown AI Assistance model. 💬</h3>")
    gr.ChatInterface(
        Mutilingual,
        examples=[
            ["What is the quantum computers ?"],
            ["what is large language models"],
        ],
    )

demo.queue().launch(debug=True)