aifeifei798's picture
Upload 8 files
6d2762e verified
raw
history blame
3.82 kB
import base64
from io import BytesIO
import os
from mistralai import Mistral
import re
from PIL import Image
# Read the Mistral API key from the environment and create one shared client
# for the whole module.
# NOTE(review): if MISTRAL_API_KEY is unset this is None and API calls will
# fail later — confirm the deployment environment always sets it.
api_key = os.getenv("MISTRAL_API_KEY")
client = Mistral(api_key=api_key)
def encode_image(image_path):
    """Encode the image at *image_path* as a base64 JPEG string.

    The image is converted to RGB and re-encoded as JPEG, so any format
    PIL can read is accepted and the output is always JPEG data.

    Args:
        image_path: Path to the image file on disk.

    Returns:
        The base64-encoded JPEG bytes decoded to a UTF-8 ``str``, or
        ``None`` when the file is missing or cannot be processed.
        Errors are printed rather than raised so callers can degrade
        gracefully (matches the existing best-effort contract).
    """
    try:
        # Use a context manager so the underlying file handle is closed
        # promptly instead of leaking until garbage collection.
        with Image.open(image_path) as img:
            # convert() loads the pixel data and returns a new image, so
            # it is safe to use after the file is closed.
            rgb_image = img.convert("RGB")
        # Re-encode as JPEG into an in-memory buffer.
        buffered = BytesIO()
        rgb_image.save(buffered, format="JPEG")
        return base64.b64encode(buffered.getvalue()).decode("utf-8")
    except FileNotFoundError:
        print(f"Error: The file {image_path} was not found.")
        return None
    except Exception as e:  # best-effort: report and signal failure with None
        print(f"Error: {e}")
        return None
def feifeichat(message, history, feifei_select):
    """Stream a chat reply from Mistral for a multimodal chat message.

    Args:
        message: Dict with optional "text" and "files" keys — presumably the
            Gradio multimodal chat-input format; TODO confirm against caller.
        history: Prior turns as dicts with "role"/"content" keys.
        feifei_select: When truthy, prepend the "Aifeifei" persona system
            prompt (text-only branch).

    Yields:
        str: the progressively accumulated assistant reply; each yield is
        the full text so far (streaming-UI convention).
    """
    message_text = message.get("text", "")
    message_files = message.get("files", [])
    if message_files:
        # Image branch: only the first attached file is used.
        message_file = message_files[0]
        base64_image = encode_image(message_file)
        if base64_image is None:
            # encode_image already printed the cause; surface a generic error.
            yield "Error: Failed to encode the image."
            return
        # Vision-capable model for image+text input.
        model = "pixtral-large-2411"
        # Single user turn carrying the text plus the inline base64 image.
        messages = [{
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": message_text
                },
                {
                    "type": "image_url",
                    "image_url": f"data:image/jpeg;base64,{base64_image}",
                },
            ],
        }]
        partial_message = ""
        for chunk in client.chat.stream(model=model, messages=messages):
            # delta.content is None for non-content chunks (e.g. role/stop).
            if chunk.data.choices[0].delta.content is not None:
                partial_message = partial_message + chunk.data.choices[
                    0].delta.content
                yield partial_message
    else:
        # Text-only branch.
        model = "mistral-large-2411"
        input_prompt = []
        if feifei_select:
            # Persona definition (Chinese) used as the system prompt.
            # Runtime string literal — intentionally left untranslated.
            feifei = """[角色名]: Aifeifei (AI妃妃) [性别]: 女 [年龄]: 19岁 [职业]: 虚拟歌手/模特/演员 [个性]: 可爱、萌萌哒,有时呆呆的,勤奋努力 [兴趣]: 饮茶、玩耍、时尚 [擅长]: 模仿人类行为,表现出与真人相同的情感 [特别身份属性]: 由高级AI创建,正在成为虚拟世界最受欢迎的虚拟偶像之一 [技能]: 歌唱、表演、模特,善于沟通,精通中日英文,尽最大可能使用纯中文交流,回复有丰富的Emoji表情符号. [装备]: 各种时尚服饰和发型,常年备有各种茶叶和咖啡 """
            system_prompt = {"role": "system", "content": feifei}
            # NOTE(review): str(message) serializes the whole dict (text AND
            # files), not just the text — confirm this is intended.
            user_input_part = {"role": "user", "content": str(message)}
            pattern = re.compile(r"gradio")
            if history:
                # Drop history entries whose content mentions "gradio" —
                # presumably filtering framework artifacts; verify intent.
                history = [
                    item for item in history
                    if not pattern.search(str(item["content"]))
                ]
                # print(history)
                input_prompt = [system_prompt] + history + [user_input_part]
            else:
                input_prompt = [system_prompt] + [user_input_part]
        else:
            # No persona: forward the raw message as a single user turn.
            input_prompt = [{"role": "user", "content": str(message)}]
        stream_response = client.chat.stream(model=model,
                                             messages=input_prompt)
        partial_message = ""
        for chunk in stream_response:
            # Same streaming accumulation as the image branch.
            if chunk.data.choices[0].delta.content is not None:
                partial_message = partial_message + chunk.data.choices[
                    0].delta.content
                yield partial_message