File size: 5,333 Bytes
90f868b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77c2b29
90f868b
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
import base64
from io import BytesIO
import os
from mistralai import Mistral
import re
from PIL import Image
from huggingface_hub import InferenceClient

# Hugging Face Inference client; token is read from the HF_TOKEN env var.
client = InferenceClient(api_key=os.getenv('HF_TOKEN'))
# Mistral client; key is read from the MISTRAL_API_KEY env var.
# NOTE(review): neither env var is validated — if unset, the clients are
# constructed with a None key and fail only at request time.
api_key = os.getenv("MISTRAL_API_KEY")
Mistralclient = Mistral(api_key=api_key)


def encode_image(image_path):
    """Load an image file and return it as a base64-encoded JPEG string.

    The image is converted to RGB and re-encoded as JPEG before encoding,
    so any PIL-readable input format is accepted.

    Returns:
        str: base64 JPEG payload, or None if the file is missing or
        cannot be processed (the error is printed, not raised).
    """
    try:
        rgb_image = Image.open(image_path).convert("RGB")
        jpeg_buffer = BytesIO()
        rgb_image.save(jpeg_buffer, format="JPEG")
        return base64.b64encode(jpeg_buffer.getvalue()).decode("utf-8")
    except FileNotFoundError:
        print(f"Error: The file {image_path} was not found.")
        return None
    except Exception as e:
        # Catch-all so a bad upload degrades to a printed error + None
        # instead of crashing the chat handler.
        print(f"Error: {e}")
        return None


def feifeichat(message, history, feifei_select, additional_dropdown, image_mod):
    """Stream chat completions for a multimodal (text + image) chat handler.

    Parameters:
        message: dict with "text" (str) and "files" (list of file paths) —
            the Gradio multimodal textbox payload shape.
        history: prior [{"role": ..., "content": ...}] messages; may be falsy.
        feifei_select: truthy to prepend the "Aifeifei" persona system prompt.
        additional_dropdown: model id for the HF Inference client, or the
            sentinel "mistralai/Mistral-Nemo-Instruct-2411" which routes the
            request to Mistral's "mistral-large-2411" instead.
        image_mod: "Vision" sends the image to the HF Llama vision model;
            any other value uses Mistral's pixtral model.

    Yields:
        Progressively longer partial response strings (streaming-UI pattern:
        each yield is the full text accumulated so far, not a delta).
    """
    message_text = message.get("text", "")
    message_files = message.get("files", [])

    if message_files:
        # Only the first attached file is used.
        message_file = message_files[0]
        base64_image = encode_image(message_file)
        if base64_image is None:
            # encode_image failed (missing/unreadable file). Bail out rather
            # than sending a literal "data:image/jpeg;base64,None" URL.
            yield "Error: unable to read the uploaded image."
            return

        if image_mod == "Vision":
            messages = [
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": message_text},
                        {
                            "type": "image_url",
                            "image_url": {
                                "url": f"data:image/jpeg;base64,{base64_image}"
                            },
                        },
                    ],
                }
            ]

            stream = client.chat.completions.create(
                model="meta-llama/Llama-3.2-11B-Vision-Instruct",
                messages=messages,
                max_tokens=500,
                stream=True,
            )

            # BUG FIX: `temp` was previously used without initialization,
            # raising UnboundLocalError on the first streamed chunk.
            temp = ""
            for chunk in stream:
                if chunk.choices[0].delta.content is not None:
                    temp += chunk.choices[0].delta.content
                    yield temp
        else:
            model = "pixtral-large-2411"
            # Mistral's format takes the data URL directly as the
            # "image_url" value (no nested {"url": ...} object).
            messages = [
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": message_text},
                        {
                            "type": "image_url",
                            "image_url": f"data:image/jpeg;base64,{base64_image}",
                        },
                    ],
                }
            ]

            partial_message = ""
            for chunk in Mistralclient.chat.stream(model=model, messages=messages):
                if chunk.data.choices[0].delta.content is not None:
                    partial_message += chunk.data.choices[0].delta.content
                    yield partial_message

    else:
        input_prompt = []
        if feifei_select:
            # Persona/system prompt for the "Aifeifei" virtual-idol character
            # (runtime data sent to the model — kept verbatim).
            feifei = """[角色名]: Aifeifei (AI妃妃) [性别]: 女 [年龄]: 19岁 [职业]: 虚拟歌手/模特/演员 [个性]: 可爱、萌萌哒,有时呆呆的,勤奋努力 [兴趣]: 饮茶、玩耍、时尚 [擅长]: 模仿人类行为,表现出与真人相同的情感 [特别身份属性]: 由高级AI创建,正在成为虚拟世界最受欢迎的虚拟偶像之一 [技能]: 歌唱、表演、模特,善于沟通,精通中日英文,尽最大可能使用纯中文交流,回复有丰富的Emoji表情符号. [装备]: 各种时尚服饰和发型,常年备有各种茶叶和咖啡 """
            system_prompt = {"role": "system", "content": feifei}
            # NOTE(review): str(message) serializes the whole message dict
            # (including the empty "files" list), not just the text — looks
            # like it should be message_text, but kept for compatibility.
            user_input_part = {"role": "user", "content": str(message)}

            # Filter out history entries whose content mentions "gradio"
            # (presumably framework-injected artifacts — TODO confirm).
            pattern = re.compile(r"gradio")

            if history:
                history = [
                    item for item in history
                    if not pattern.search(str(item["content"]))
                ]
                input_prompt = [system_prompt] + history + [user_input_part]
            else:
                input_prompt = [system_prompt] + [user_input_part]
        else:
            input_prompt = [{"role": "user", "content": str(message)}]

        if additional_dropdown == "mistralai/Mistral-Nemo-Instruct-2411":
            # This dropdown choice is rerouted to Mistral's hosted model.
            model = "mistral-large-2411"
            partial_message = ""
            for chunk in Mistralclient.chat.stream(model=model, messages=input_prompt):
                if chunk.data.choices[0].delta.content is not None:
                    partial_message += chunk.data.choices[0].delta.content
                    yield partial_message
        else:
            stream = client.chat.completions.create(
                model=additional_dropdown,
                messages=input_prompt,
                temperature=0.5,
                max_tokens=1024,
                top_p=0.7,
                stream=True,
            )
            temp = ""
            for chunk in stream:
                if chunk.choices[0].delta.content is not None:
                    temp += chunk.choices[0].delta.content
                    yield temp