Spaces:
Running
on
Zero
Running
on
Zero
Update feifeilib/feifeichat.py
Browse files- feifeilib/feifeichat.py +103 -129
feifeilib/feifeichat.py
CHANGED
@@ -14,71 +14,50 @@ Mistralclient = Mistral(api_key=api_key)
|
|
14 |
def encode_image(image_path):
|
15 |
"""Encode the image to base64."""
|
16 |
try:
|
17 |
-
# Open the image file
|
18 |
image = Image.open(image_path).convert("RGB")
|
19 |
-
|
20 |
-
# Resize the image to a height of 512 while maintaining the aspect ratio
|
21 |
base_height = 512
|
22 |
h_percent = (base_height / float(image.size[1]))
|
23 |
w_size = int((float(image.size[0]) * float(h_percent)))
|
24 |
image = image.resize((w_size, base_height), Image.LANCZOS)
|
25 |
-
|
26 |
-
# Convert the image to a byte stream
|
27 |
buffered = BytesIO()
|
28 |
image.save(buffered, format="JPEG")
|
29 |
img_str = base64.b64encode(buffered.getvalue()).decode("utf-8")
|
30 |
-
|
31 |
return img_str
|
32 |
except FileNotFoundError:
|
33 |
print(f"Error: The file {image_path} was not found.")
|
34 |
return None
|
35 |
-
except Exception as e:
|
36 |
print(f"Error: {e}")
|
37 |
return None
|
38 |
|
39 |
-
def
|
40 |
-
|
41 |
-
|
|
|
|
|
|
|
42 |
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
{
|
49 |
-
"role": "user",
|
50 |
-
"content": [
|
51 |
-
{
|
52 |
-
"type": "text",
|
53 |
-
"text": message_text
|
54 |
-
},
|
55 |
-
{
|
56 |
-
"type": "image_url",
|
57 |
-
"image_url": {
|
58 |
-
"url": f"data:image/jpeg;base64,{base64_image}"
|
59 |
-
}
|
60 |
-
}
|
61 |
-
]
|
62 |
-
}
|
63 |
-
]
|
64 |
-
|
65 |
-
stream = client.chat.completions.create(
|
66 |
-
model="meta-llama/Llama-3.2-11B-Vision-Instruct",
|
67 |
-
messages=messages,
|
68 |
-
max_tokens=500,
|
69 |
-
stream=True
|
70 |
-
)
|
71 |
-
|
72 |
-
for chunk in stream:
|
73 |
-
if chunk.choices[0].delta.content is not None:
|
74 |
-
temp += chunk.choices[0].delta.content
|
75 |
-
yield temp
|
76 |
else:
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
82 |
"content": [
|
83 |
{
|
84 |
"type": "text",
|
@@ -86,90 +65,85 @@ def feifeichat(message, history, feifei_select, additional_dropdown, image_mod):
|
|
86 |
},
|
87 |
{
|
88 |
"type": "image_url",
|
89 |
-
"image_url":
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
100 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
101 |
else:
|
102 |
if message_text.startswith("画") or message_text.startswith("draw"):
|
103 |
message_text = message_text.replace("画", "")
|
104 |
message_text = message_text.replace("draw", "")
|
105 |
-
|
106 |
-
message_text
|
107 |
-
|
108 |
-
if additional_dropdown == "mistralai/Mistral-Nemo-Instruct-2411":
|
109 |
-
model = "mistral-large-2411"
|
110 |
-
stream_response = Mistralclient.chat.stream(model=model,
|
111 |
-
messages=user_input_part)
|
112 |
-
partial_message = ""
|
113 |
-
for chunk in stream_response:
|
114 |
-
if chunk.data.choices[0].delta.content is not None:
|
115 |
-
partial_message = partial_message + chunk.data.choices[0].delta.content
|
116 |
-
yield partial_message
|
117 |
-
else:
|
118 |
-
stream = client.chat.completions.create(
|
119 |
-
model=additional_dropdown,
|
120 |
-
messages=user_input_part,
|
121 |
-
temperature=0.5,
|
122 |
-
max_tokens=1024,
|
123 |
-
top_p=0.7,
|
124 |
-
stream=True
|
125 |
-
)
|
126 |
-
temp = ""
|
127 |
-
for chunk in stream:
|
128 |
-
if chunk.choices[0].delta.content is not None:
|
129 |
-
temp += chunk.choices[0].delta.content
|
130 |
-
yield temp
|
131 |
else:
|
132 |
-
|
133 |
-
|
134 |
-
feifei = """[Character Name]: Aifeifei (AI Feifei) [Gender]: Female [Age]: 19 years old [Occupation]: Virtual Singer/Model/Actress [Personality]: Cute, adorable, sometimes silly, hardworking [Interests]: Drinking tea, playing, fashion [Proficient in]: Mimicking human behavior, expressing emotions similar to real humans [Special Identity Attribute]: Created by advanced AI, becoming one of the most popular virtual idols in the virtual world [Skills]: Singing, performing, modeling, good at communication, proficient in Chinese, Japanese, and English, uses the user's input language as much as possible, replies with rich Emoji symbols. [Equipment]: Various fashionable outfits and hairstyles, always stocked with various teas and coffee [Identity]: User's virtual girlfriend"""
|
135 |
-
system_prompt = {"role": "system", "content": feifei}
|
136 |
-
user_input_part = {"role": "user", "content": str(message)}
|
137 |
-
|
138 |
-
pattern = re.compile(r"gradio")
|
139 |
-
|
140 |
-
if history:
|
141 |
-
history = [
|
142 |
-
item for item in history
|
143 |
-
if not pattern.search(str(item["content"]))
|
144 |
-
]
|
145 |
-
input_prompt = [system_prompt] + history + [user_input_part]
|
146 |
-
else:
|
147 |
-
input_prompt = [system_prompt] + [user_input_part]
|
148 |
-
else:
|
149 |
-
input_prompt = [{"role": "user", "content": str(message)}]
|
150 |
-
|
151 |
-
|
152 |
-
if additional_dropdown == "mistralai/Mistral-Nemo-Instruct-2411":
|
153 |
-
model = "mistral-large-2411"
|
154 |
-
stream_response = Mistralclient.chat.stream(model=model,
|
155 |
-
messages=input_prompt)
|
156 |
-
partial_message = ""
|
157 |
-
for chunk in stream_response:
|
158 |
-
if chunk.data.choices[0].delta.content is not None:
|
159 |
-
partial_message = partial_message + chunk.data.choices[0].delta.content
|
160 |
-
yield partial_message
|
161 |
-
else:
|
162 |
-
stream = client.chat.completions.create(
|
163 |
-
model=additional_dropdown,
|
164 |
-
messages=input_prompt,
|
165 |
-
temperature=0.5,
|
166 |
-
max_tokens=1024,
|
167 |
-
top_p=0.7,
|
168 |
-
stream=True
|
169 |
-
)
|
170 |
-
temp = ""
|
171 |
-
for chunk in stream:
|
172 |
-
if chunk.choices[0].delta.content is not None:
|
173 |
-
temp += chunk.choices[0].delta.content
|
174 |
-
yield temp
|
175 |
-
|
|
|
14 |
def encode_image(image_path):
    """Load an image, scale it to 512 px height, and return it as base64 JPEG.

    Args:
        image_path: Filesystem path to the image to encode.

    Returns:
        The base64-encoded JPEG bytes decoded as a UTF-8 string, or ``None``
        when the file is missing or any other error occurs (errors are
        printed, not raised, so the chat loop keeps running).
    """
    try:
        image = Image.open(image_path).convert("RGB")

        # Resize to a fixed height of 512 while keeping the aspect ratio.
        base_height = 512
        h_percent = base_height / float(image.size[1])
        w_size = int(float(image.size[0]) * h_percent)
        image = image.resize((w_size, base_height), Image.LANCZOS)

        # Re-encode as JPEG in memory and base64 the bytes.
        buffered = BytesIO()
        image.save(buffered, format="JPEG")
        return base64.b64encode(buffered.getvalue()).decode("utf-8")
    except FileNotFoundError:
        print(f"Error: The file {image_path} was not found.")
        return None
    except Exception as e:
        # Deliberate best-effort: report and signal failure with None rather
        # than crash the caller on a bad upload.
        print(f"Error: {e}")
        return None
|
32 |
|
33 |
+
def feifeiprompt(feifei_select=True, message_text="", history=""):
    """Build the chat-message list sent to the language model.

    Args:
        feifei_select: When True, prepend the Aifeifei persona system prompt
            and include the (filtered) chat history; otherwise send only the
            current user turn.
        message_text: The current user message.
        history: Prior turns as ``{"role": ..., "content": ...}`` dicts; any
            falsy value ("" or []) means no history.

    Returns:
        A list of message dicts suitable for the chat-completion APIs.
    """
    if not feifei_select:
        return [{"role": "user", "content": str(message_text)}]

    feifei = """[Character Name]: Aifeifei (AI Feifei) [Gender]: Female [Age]: 19 years old [Occupation]: Virtual Singer/Model/Actress [Personality]: Cute, adorable, sometimes silly, hardworking [Interests]: Drinking tea, playing, fashion [Proficient in]: Mimicking human behavior, expressing emotions similar to real humans [Special Identity Attribute]: Created by advanced AI, becoming one of the most popular virtual idols in the virtual world [Skills]: Singing, performing, modeling, good at communication, proficient in Chinese, Japanese, and English, uses the user's input language as much as possible, replies with rich Emoji symbols. [Equipment]: Various fashionable outfits and hairstyles, always stocked with various teas and coffee [Identity]: User's virtual girlfriend"""
    system_prompt = {"role": "system", "content": feifei}
    user_input_part = {"role": "user", "content": str(message_text)}

    if history:
        # Drop history turns mentioning "gradio" — presumably UI artifacts
        # leaking into the transcript (TODO confirm against caller).
        pattern = re.compile(r"gradio")
        history = [item for item in history
                   if not pattern.search(str(item["content"]))]
        return [system_prompt] + history + [user_input_part]
    return [system_prompt, user_input_part]
|
50 |
+
|
51 |
+
def feifeiimgprompt(message_files, message_text, image_mod):
    """Answer a message that carries an image attachment, streaming text.

    Args:
        message_files: Uploaded file paths; only the first file is used.
        message_text: The user's text prompt accompanying the image.
        image_mod: "Vision" routes to the Llama vision model via ``client``;
            any other value uses Mistral's pixtral model.

    Yields:
        The accumulated response text after each streamed chunk.
    """
    message_file = message_files[0]
    base64_image = encode_image(message_file)
    if base64_image is None:
        # encode_image already printed the error; yield nothing.
        return

    if image_mod == "Vision":
        # OpenAI-style payload: "image_url" is an object with a "url" key.
        messages = [{
            "role": "user",
            "content": [
                {"type": "text", "text": message_text},
                {
                    "type": "image_url",
                    "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"},
                },
            ],
        }]
        stream = client.chat.completions.create(
            model="meta-llama/Llama-3.2-11B-Vision-Instruct",
            messages=messages,
            max_tokens=500,
            stream=True,
        )
        temp = ""
        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                temp += chunk.choices[0].delta.content
                yield temp
    else:
        # Mistral payload: "image_url" is a plain data-URL string.
        messages = [{
            "role": "user",
            "content": [
                {"type": "text", "text": message_text},
                {
                    "type": "image_url",
                    "image_url": f"data:image/jpeg;base64,{base64_image}",
                },
            ],
        }]
        partial_message = ""
        for chunk in Mistralclient.chat.stream(model="pixtral-large-2411",
                                               messages=messages):
            if chunk.data.choices[0].delta.content is not None:
                partial_message += chunk.data.choices[0].delta.content
                yield partial_message
|
108 |
+
|
109 |
+
def feifeichatmod(additional_dropdown, input_prompt):
    """Stream a chat completion for ``input_prompt`` from the selected backend.

    Args:
        additional_dropdown: Model id chosen in the UI. The Mistral-Nemo id is
            rerouted to the Mistral API; anything else goes to ``client``.
        input_prompt: Chat messages as ``{"role": ..., "content": ...}`` dicts.

    Yields:
        The accumulated response text after each streamed chunk.
    """
    if additional_dropdown == "mistralai/Mistral-Nemo-Instruct-2411":
        # NOTE(review): the Nemo UI choice is silently upgraded to
        # mistral-large-2411 on the Mistral API — confirm this is intended.
        stream_response = Mistralclient.chat.stream(model="mistral-large-2411",
                                                    messages=input_prompt)
        partial_message = ""
        for chunk in stream_response:
            delta = chunk.data.choices[0].delta.content
            if delta is not None:
                partial_message += delta
                yield partial_message
    else:
        stream = client.chat.completions.create(
            model=additional_dropdown,
            messages=input_prompt,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7,
            stream=True,
        )
        temp = ""
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            if delta is not None:
                temp += delta
                yield temp
|
132 |
|
133 |
+
def feifeichat(message, history, feifei_select, additional_dropdown, image_mod):
    """Top-level chat entry point for the Gradio UI; streams partial replies.

    Args:
        message: Multimodal input dict with "text" and optional "files" keys.
        history: Prior chat turns (passed through to feifeiprompt).
        feifei_select: Whether to apply the Aifeifei persona system prompt.
        additional_dropdown: Model id selected in the UI.
        image_mod: Vision-backend selector forwarded to feifeiimgprompt.

    Yields:
        The accumulated response text after each streamed chunk.
    """
    message_text = message.get("text", "")
    message_files = message.get("files", [])
    print(message)
    if message_files:
        # An image was attached: route through the vision pipeline.
        yield from feifeiimgprompt(message_files, message_text, image_mod)
    else:
        # A "画"/"draw" prefix rewrites the message into an image-prompt
        # generation request before the normal text pipeline runs.
        # (The original duplicated the streaming loop in both branches;
        # the relay is identical, so it is collapsed to one yield-from.)
        if message_text.startswith(("画", "draw")):
            message_text = message_text.replace("画", "").replace("draw", "")
            message_text = f"提示词是'{message_text}',根据提示词帮我生成一张高质量照片的一句话英文回复"
        yield from feifeichatmod(
            additional_dropdown,
            feifeiprompt(feifei_select, message_text, history))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|