import sys
print(sys.path)
from flask import Flask, request, jsonify, render_template, send_from_directory
from flask_cors import CORS
import os
from llama3 import LlaMa3  # import your LlaMa3 class

app = Flask(__name__)
CORS(app)

# Instantiate the LlaMa3 model
llama3_model = LlaMa3()
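# NOTE: llama3.LlaMa3 is assumed to expose a chat(messages) method that takes a
# list of {"role": ..., "content": ...} dicts and returns the reply as a
# JSON-serializable value; chat() below relies on exactly that interface.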

@app.route('/')
def index():
    # Return the HTML page
    return render_template('index.html')

@app.route('/chat', methods=['POST'])  # endpoint the frontend posts messages to
def chat():
    # Get the user message sent by the frontend
    user_message = request.json.get('message', '')
    if not user_message.strip():
        return jsonify({"response": "Please enter valid content!"}), 400

    try:
        # Build the chat context
        messages = [{"role": "user", "content": user_message}]
        # Call LlaMa3's chat method to generate a reply
        ai_response = llama3_model.chat(messages)
        # Return the AI's reply
        return jsonify({"response": ai_response})
    except Exception as e:
        print(f"Error during llama3 call: {e}")
        return jsonify({"response": "An error occurred, please try again later!"}), 500

@app.route('/favicon.ico')
def favicon():
    # Serve the favicon from the static directory
    return send_from_directory(os.path.join(app.root_path, 'static'),
                               'favicon.ico', mimetype='image/vnd.microsoft.icon')

if __name__ == '__main__':
    app.run(debug=True, host='127.0.0.1', port=5000)
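
# A minimal sketch of how a client might call the /chat endpoint above
# (assumes the server is running locally and the `requests` package is installed):
#
#   import requests
#   resp = requests.post("http://127.0.0.1:5000/chat", json={"message": "Hello"})
#   print(resp.json()["response"])
#
# On success the server returns JSON of the form {"response": "<model reply>"}.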