{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): pathlib, textwrap and anvil.server are not used in the cells\n",
    "# below — presumably other cells (e.g. anvil.server.connect / @anvil.server.callable\n",
    "# registrations) relied on them; kept to avoid breaking such usage. TODO confirm.\n",
    "import anvil.server\n",
    "import openai\n",
    "import pathlib\n",
    "import textwrap\n",
    "import google.generativeai as genai"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_gemini(text, key, model_name='gemini-pro'):\n",
    "    \"\"\"Send `text` to a Google Gemini model and return the generated reply.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    text : str\n",
    "        Prompt to send to the model.\n",
    "    key : str\n",
    "        Google Generative AI API key; configured globally via genai.configure.\n",
    "    model_name : str, optional\n",
    "        Gemini model identifier (default 'gemini-pro', the original\n",
    "        hard-coded value, so existing callers are unaffected).\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    str\n",
    "        Text of the model's response. Any API error propagates to the caller.\n",
    "    \"\"\"\n",
    "    genai.configure(api_key=key)\n",
    "    model = genai.GenerativeModel(model_name)\n",
    "    response = model.generate_content(text)\n",
    "    return response.text"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_gpt(prompt, key, model):\n",
    "    \"\"\"Send `prompt` to an OpenAI chat model and return a status tuple.\n",
    "\n",
    "    Uses the openai>=1.0 client interface (openai.chat.completions.create).\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    prompt : str\n",
    "        User message; a fixed system prompt is prepended.\n",
    "    key : str\n",
    "        OpenAI API key; assigned to the module-global openai.api_key.\n",
    "    model : str\n",
    "        Chat model identifier (e.g. 'gpt-4o-mini').\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    tuple\n",
    "        (0, reply_text) on success, (-1, error_message) on any failure,\n",
    "        so callers branch on the status code instead of catching exceptions.\n",
    "    \"\"\"\n",
    "    openai.api_key = key\n",
    "    try:\n",
    "        messages = [\n",
    "            {\"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n",
    "            {\"role\": \"user\", \"content\": prompt},\n",
    "        ]\n",
    "        completion = openai.chat.completions.create(model=model, messages=messages)\n",
    "        prediction = completion.choices[0].message.content.strip()\n",
    "    except Exception as e:\n",
    "        # Deliberately broad: auth, network and API errors are all reported\n",
    "        # back as (-1, message) rather than raised — callers expect a tuple.\n",
    "        return -1, str(e)\n",
    "    return 0, prediction"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "py310all",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}