scott12355 committed
Commit 07d17d6 · Parent(s): 10f7bb7
add post for chat history
Files changed:
- __pycache__/app.cpython-39.pyc +0 -0
- app.py +15 -1
- requirements.txt +2 -1
__pycache__/app.cpython-39.pyc
ADDED
Binary file (1.64 kB).
app.py
CHANGED
@@ -1,6 +1,8 @@
 from fastapi import FastAPI
 from transformers import pipeline
 import torch
+from pydantic import BaseModel
+from typing import List, Dict
 if torch.backends.mps.is_available():
     device = torch.device("mps")
 elif torch.cuda.is_available():
@@ -15,6 +17,9 @@ modelName = "Qwen/Qwen2.5-1.5B-Instruct" #Qwen/Qwen2.5-1.5B-Instruct
 pipe = pipeline("text-generation", model=modelName, device=device, batch_size=8)
 sentiment_model = pipeline("sentiment-analysis", device=device)
 
+class ChatRequest(BaseModel):
+    conversationHistory: List[Dict[str, str]]
+
 
 
 @app.get("/")
@@ -36,4 +41,13 @@ def generate(text: str):
     # print(output)
 
     print(output)
-    return {"output": output[0]["generated_text"][-1]['content']}
+    return {"output": output[0]["generated_text"][-1]['content']}
+
+@app.post("/chat")
+def chat(request: ChatRequest):
+    """
+    Generate a response from the NLP model.
+    """
+
+    output = pipe(request.conversationHistory, num_return_sequences=1, max_new_tokens=250)
+    return output
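For reference, a minimal client sketch for the new /chat endpoint (not part of the commit; the base URL and port are assumptions for a locally running instance):

import requests

# Chat history in the shape ChatRequest expects: a list of role/content dicts.
payload = {
    "conversationHistory": [
        {"role": "user", "content": "Hello, how are you?"},
    ]
}

resp = requests.post("http://localhost:8000/chat", json=payload)
resp.raise_for_status()

# /chat returns the raw text-generation pipeline output; with chat-style input
# this is typically a list like [{"generated_text": [...messages...]}], which is
# why /generate reads output[0]["generated_text"][-1]["content"].
print(resp.json())

Unlike /generate, the new endpoint returns the pipeline output unparsed, so the client is responsible for extracting the assistant message.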
requirements.txt
CHANGED
@@ -1,4 +1,5 @@
 fastapi==0.99.1
 uvicorn
 transformers
-torch
+torch
+pydantic
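The new pydantic dependency backs the ChatRequest model; a standalone sketch (not part of the commit) of the validation it gives the /chat endpoint:

from typing import Dict, List
from pydantic import BaseModel, ValidationError

class ChatRequest(BaseModel):
    conversationHistory: List[Dict[str, str]]

# A well-formed chat history passes validation.
ok = ChatRequest(conversationHistory=[{"role": "user", "content": "Hi"}])
print(ok.conversationHistory)

# A body missing the field is rejected, which is what lets FastAPI return a
# 422 error instead of passing bad input to the text-generation pipeline.
try:
    ChatRequest()
except ValidationError as err:
    print(err)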