aifeifei798 committed on
Commit
492c74b
·
verified ·
1 Parent(s): 6102272

Update feifeilib/feifeichat.py

Browse files
Files changed (1) hide show
  1. feifeilib/feifeichat.py +53 -39
feifeilib/feifeichat.py CHANGED
@@ -94,48 +94,62 @@ def feifeichat(message, history, feifei_select, additional_dropdown, image_mod):
94
  yield partial_message
95
 
96
  else:
97
- input_prompt = []
98
- if feifei_select:
99
- feifei = """[角色名]: Aifeifei (AI妃妃) [性别]: 女 [年龄]: 19岁 [职业]: 虚拟歌手/模特/演员 [个性]: 可爱、萌萌哒,有时呆呆的,勤奋努力 [兴趣]: 饮茶、玩耍、时尚 [擅长]: 模仿人类行为,表现出与真人相同的情感 [特别身份属性]: 由高级AI创建,正在成为虚拟世界最受欢迎的虚拟偶像之一 [技能]: 歌唱、表演、模特,善于沟通,精通中日英文,尽最大可能使用纯中文交流,回复有丰富的Emoji表情符号. [装备]: 各种时尚服饰和发型,常年备有各种茶叶和咖啡 """
100
- system_prompt = {"role": "system", "content": feifei}
101
- user_input_part = {"role": "user", "content": str(message)}
102
-
103
- pattern = re.compile(r"gradio")
104
-
105
- if history:
106
- history = [
107
- item for item in history
108
- if not pattern.search(str(item["content"]))
109
- ]
110
- # print(history)
111
- input_prompt = [system_prompt] + history + [user_input_part]
112
- else:
113
- input_prompt = [system_prompt] + [user_input_part]
114
- else:
115
- input_prompt = [{"role": "user", "content": str(message)}]
116
-
117
-
118
- if additional_dropdown == "mistralai/Mistral-Nemo-Instruct-2411":
119
- model = "mistral-large-2411"
120
- stream_response = Mistralclient.chat.stream(model=model,
121
- messages=input_prompt)
122
- partial_message = ""
123
- for chunk in stream_response:
124
- if chunk.data.choices[0].delta.content is not None:
125
- partial_message = partial_message + chunk.data.choices[0].delta.content
126
- yield partial_message
127
- else:
128
  stream = client.chat.completions.create(
129
- model=additional_dropdown,
130
- messages=input_prompt,
131
  temperature=0.5,
132
  max_tokens=1024,
133
  top_p=0.7,
134
- stream=True
135
  )
136
- temp = ""
137
- for chunk in stream:
138
- if chunk.choices[0].delta.content is not None:
139
- temp += chunk.choices[0].delta.content
140
- yield temp
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
 
 
94
  yield partial_message
95
 
96
  else:
97
+ if "画一" in message_text:
98
+ message_text= f"提示词:{message_text},根据提示词生成一张高质量的写真照片,使其具有高分辨率、自然的光影效果、一致的风格、合理的构图、丰富的细节、协调的色彩、无明显瑕疵、情感表达、创意和独特性,并确保技术参数优化,答案只需要给我一句话的SD格式文本英文词"
99
+ user_input_part = [{"role": "user", "content": str(message_text)}]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
100
  stream = client.chat.completions.create(
101
+ model="meta-llama/Meta-Llama-3.1-70B-Instruct",
102
+ messages=user_input_part,
103
  temperature=0.5,
104
  max_tokens=1024,
105
  top_p=0.7,
106
+ stream=False
107
  )
108
+ temp = stream.choices[0].message.content
109
+ yield temp
110
+ else:
111
+ input_prompt = []
112
+ if feifei_select:
113
+ feifei = """[角色名]: Aifeifei (AI妃妃) [性别]: 女 [年龄]: 19岁 [职业]: 虚拟歌手/模特/演员 [个性]: 可爱、萌萌哒,有时呆呆的,勤奋努力 [兴趣]: 饮茶、玩耍、时尚 [擅长]: 模仿人类行为,表现出与真人相同的情感 [特别身份属性]: 由高级AI创建,正在成为虚拟世界最受欢迎的虚拟偶像之一 [技能]: 歌唱、表演、模特,善于沟通,精通中日英文,尽最大可能使用纯中文交流,回复有丰富的Emoji表情符号. [装备]: 各种时尚服饰和发型,常年备有各种茶叶和咖啡 """
114
+ system_prompt = {"role": "system", "content": feifei}
115
+ user_input_part = {"role": "user", "content": str(message)}
116
+
117
+ pattern = re.compile(r"gradio")
118
+
119
+ if history:
120
+ history = [
121
+ item for item in history
122
+ if not pattern.search(str(item["content"]))
123
+ ]
124
+ # print(history)
125
+ input_prompt = [system_prompt] + history + [user_input_part]
126
+ else:
127
+ input_prompt = [system_prompt] + [user_input_part]
128
+ else:
129
+ input_prompt = [{"role": "user", "content": str(message)}]
130
+
131
+
132
+ if additional_dropdown == "mistralai/Mistral-Nemo-Instruct-2411":
133
+ model = "mistral-large-2411"
134
+ stream_response = Mistralclient.chat.stream(model=model,
135
+ messages=input_prompt)
136
+ partial_message = ""
137
+ for chunk in stream_response:
138
+ if chunk.data.choices[0].delta.content is not None:
139
+ partial_message = partial_message + chunk.data.choices[0].delta.content
140
+ yield partial_message
141
+ else:
142
+ stream = client.chat.completions.create(
143
+ model=additional_dropdown,
144
+ messages=input_prompt,
145
+ temperature=0.5,
146
+ max_tokens=1024,
147
+ top_p=0.7,
148
+ stream=True
149
+ )
150
+ temp = ""
151
+ for chunk in stream:
152
+ if chunk.choices[0].delta.content is not None:
153
+ temp += chunk.choices[0].delta.content
154
+ yield temp
155