pierrelissope committed
Commit · 743fc77
1 Parent(s): f15562f

final update
Browse files
- back.py +0 -50
- back/client.py +116 -0
- back/server_deploy.py +92 -0
- front/dist/index.html +1 -1
- front/src/Homescene.tsx +1 -1
- front/src/VerificationScene.tsx +5 -3
- front/src/components/ResultContainer.tsx +8 -2
- main.py +6 -4
back.py
DELETED
@@ -1,50 +0,0 @@
-import time
-from pydantic import BaseModel
-import base64
-from fastapi import FastAPI, APIRouter
-from fastapi.middleware.cors import CORSMiddleware
-
-app = FastAPI()
-
-origins = [
-    "http://localhost:7860",
-    "https://example.com",
-]
-
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=origins,
-    allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
-)
-
-class ImageData(BaseModel):
-    image: str
-
-class ImagesData(BaseModel):
-    idCard: str
-    profileImage: str
-
-@app.post("/uploadpdf")
-async def upload_pdf(image_data: ImageData):
-    header, encoded = image_data.image.split(',', 1)
-    binary_data = base64.b64decode(encoded)
-
-    time.sleep(20);
-
-    return {"message": "Image reçue et sauvegardée"}
-
-@app.post("/uploadids")
-async def upload_ids(images_data: ImagesData):
-    header, encoded = images_data.idCard.split(',', 1)
-    id_card_binary_data = base64.b64decode(encoded)
-
-    header, encoded = images_data.idCard.split(',', 1)
-    profile_image_binary_data = base64.b64decode(encoded)
-
-    time.sleep(20);
-
-    return {"message": "Image reçue et sauvegardée"}
-
-
back/client.py
ADDED
@@ -0,0 +1,116 @@
+
+"""Client script.
+
+This script does the following:
+- Query crypto-parameters and pre/post-processing parameters (client.zip)
+- Quantize the inputs using the parameters
+- Encrypt data using the crypto-parameters
+- Send the encrypted data to the server (async using grequests)
+- Collect the data and decrypt it
+- De-quantize the decrypted results
+"""
+
+import io
+import os
+import sys
+from pathlib import Path
+
+import grequests
+import numpy
+import requests
+import torch
+import torchvision
+import torchvision.transforms as transforms
+
+from concrete.ml.deployment import FHEModelClient
+
+PORT = os.environ.get("PORT", "5000")
+IP = os.environ.get("IP", "localhost")
+URL = os.environ.get("URL", f"http://{IP}:{PORT}")
+NUM_SAMPLES = int(os.environ.get("NUM_SAMPLES", 1))
+STATUS_OK = 200
+
+
+def main():
+    # Get the necessary data for the client
+    # client.zip
+
+    train_sub_set = ...
+
+    zip_response = requests.get(f"{URL}/get_client")
+    assert zip_response.status_code == STATUS_OK
+    with open("./client.zip", "wb") as file:
+        file.write(zip_response.content)
+
+    # Get the data to infer
+    X = train_sub_set[:1]
+
+    # Create the client
+    client = FHEModelClient(path_dir="./", key_dir="./keys")
+
+    # The client first need to create the private and evaluation keys.
+    serialized_evaluation_keys = client.get_serialized_evaluation_keys()
+
+    assert isinstance(serialized_evaluation_keys, bytes)
+
+    # Evaluation keys can be quite large files but only have to be shared once with the server.
+
+    # Check the size of the evaluation keys (in MB)
+    print(f"Evaluation keys size: {sys.getsizeof(serialized_evaluation_keys) / 1024 / 1024:.2f} MB")
+
+    # Update all base64 queries encodings with UploadFile
+    response = requests.post(
+        f"{URL}/add_key",
+        files={"key": io.BytesIO(initial_bytes=serialized_evaluation_keys)},
+    )
+    assert response.status_code == STATUS_OK
+    uid = response.json()["uid"]
+
+    inferences = []
+    # Launch the queries
+    clear_input = X[[0], :].numpy()
+    print("Input shape:", clear_input.shape)
+
+    assert isinstance(clear_input, numpy.ndarray)
+    print("Quantize/Encrypt")
+    encrypted_input = client.quantize_encrypt_serialize(clear_input)  # Encrypt the data
+    assert isinstance(encrypted_input, bytes)
+
+    print(f"Encrypted input size: {sys.getsizeof(encrypted_input) / 1024 / 1024:.2f} MB")
+
+    print("Posting query")
+    inferences.append(
+        grequests.post(
+            f"{URL}/compute",
+            files={
+                "model_input": io.BytesIO(encrypted_input),
+            },
+            data={
+                "uid": uid,
+            },
+        )
+    )
+
+    del encrypted_input
+    del serialized_evaluation_keys
+
+    print("Posted!")
+
+    # Unpack the results
+    decrypted_predictions = []
+    for result in grequests.map(inferences):
+        if result is None:
+            raise ValueError(
+                "Result is None, probably because the server crashed due to lack of available memory."
+            )
+        assert result.status_code == STATUS_OK
+        print("OK!")
+
+        encrypted_result = result.content
+        decrypted_prediction = client.deserialize_decrypt_dequantize(encrypted_result)[0]
+        decrypted_predictions.append(decrypted_prediction)
+    print(decrypted_predictions)
+
+
+if __name__ == "__main__":
+    main()
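Note: the committed client leaves train_sub_set = ... as a placeholder. A minimal sketch of one way to fill it in, assuming the deployed model was compiled on flattened 28x28 grayscale inputs (the MNIST dataset and the reshaping below are assumptions, not part of this commit):

# Hypothetical sketch: one way to provide the `train_sub_set` placeholder in back/client.py.
# The dataset choice (MNIST) and the flattening to shape (N, 784) are assumptions.
import torch
import torchvision
import torchvision.transforms as transforms

train_set = torchvision.datasets.MNIST(
    root="./data", train=True, download=True, transform=transforms.ToTensor()
)
# Stack a few samples into a float tensor of shape (N, 784) so that
# X = train_sub_set[:1] and X[[0], :].numpy() behave as the script expects.
train_sub_set = torch.stack([train_set[i][0].reshape(-1) for i in range(10)])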
back/server_deploy.py
ADDED
@@ -0,0 +1,92 @@
+
+"""Deployment server.
+
+Routes:
+- Get client.zip
+- Add a key
+- Compute
+"""
+
+import io
+import os
+import uuid
+from pathlib import Path
+from typing import Dict
+
+import uvicorn
+from fastapi import FastAPI, Form, HTTPException, UploadFile
+from fastapi.responses import FileResponse, StreamingResponse
+
+# No relative import here because when not used in the package itself
+from concrete.ml.deployment import FHEModelServer
+
+if __name__ == "__main__":
+    app = FastAPI(debug=False)
+
+    FILE_FOLDER = Path(__file__).parent
+
+    KEY_PATH = Path(os.environ.get("KEY_PATH", FILE_FOLDER / Path("server_keys")))
+    CLIENT_SERVER_PATH = Path(os.environ.get("PATH_TO_MODEL", FILE_FOLDER / Path("dev")))
+    PORT = os.environ.get("PORT", "5000")
+
+    fhe = FHEModelServer(str(CLIENT_SERVER_PATH.resolve()))
+
+    KEYS: Dict[str, bytes] = {}
+
+    PATH_TO_CLIENT = (CLIENT_SERVER_PATH / "client.zip").resolve()
+    PATH_TO_SERVER = (CLIENT_SERVER_PATH / "server.zip").resolve()
+
+    assert PATH_TO_CLIENT.exists()
+    assert PATH_TO_SERVER.exists()
+
+    @app.get("/get_client")
+    def get_client():
+        """Get client.
+
+        Returns:
+            FileResponse: client.zip
+
+        Raises:
+            HTTPException: if the file can't be find locally
+        """
+        path_to_client = (CLIENT_SERVER_PATH / "client.zip").resolve()
+        if not path_to_client.exists():
+            raise HTTPException(status_code=500, detail="Could not find client.")
+        return FileResponse(path_to_client, media_type="application/zip")
+
+    @app.post("/add_key")
+    async def add_key(key: UploadFile):
+        """Add public key.
+
+        Arguments:
+            key (UploadFile): public key
+
+        Returns:
+            Dict[str, str]
+                - uid: uid a personal uid
+        """
+        uid = str(uuid.uuid4())
+        KEYS[uid] = await key.read()
+        return {"uid": uid}
+
+    @app.post("/compute")
+    async def compute(model_input: UploadFile, uid: str = Form()):  # noqa: B008
+        """Compute the circuit over encrypted input.
+
+        Arguments:
+            model_input (UploadFile): input of the circuit
+            uid (str): uid of the public key to use
+
+        Returns:
+            StreamingResponse: the result of the circuit
+        """
+        key = KEYS[uid]
+        encrypted_results = fhe.run(
+            serialized_encrypted_quantized_data=await model_input.read(),
+            serialized_evaluation_keys=key,
+        )
+        return StreamingResponse(
+            io.BytesIO(encrypted_results),
+        )
+
+    uvicorn.run(app, host="0.0.0.0", port=int(PORT))
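Note: server_deploy.py expects the folder pointed to by PATH_TO_MODEL (default ./dev) to already contain client.zip and server.zip. A minimal sketch of how such a folder is typically produced with Concrete ML's FHEModelDev, using a placeholder model and dummy data that are assumptions rather than part of this commit:

# Hypothetical sketch: generating the ./dev folder consumed by back/server_deploy.py.
# The model type, data, and input shape are placeholders, not part of this commit.
import numpy
from concrete.ml.deployment import FHEModelDev
from concrete.ml.sklearn import LogisticRegression

X = numpy.random.rand(100, 784)  # assumed input shape
y = numpy.arange(100) % 2        # dummy binary labels

model = LogisticRegression()
model.fit(X, y)
model.compile(X)                 # compile the model to an FHE circuit on representative inputs

# Writes client.zip and server.zip into ./dev
FHEModelDev(path_dir="./dev", model=model).save()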
front/dist/index.html
CHANGED
@@ -5,7 +5,7 @@
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Vite + React + TS</title>
-    <script type="module" crossorigin src="/assets/index-
+    <script type="module" crossorigin src="/assets/index-Cqst1UVv.js"></script>
    <link rel="stylesheet" crossorigin href="/assets/index-DBF5kfs0.css">
  </head>
  <body>
front/src/Homescene.tsx
CHANGED
@@ -17,7 +17,7 @@ export default function HomeScene() {
    formData.append("file", file);

    try {
-      const response = await fetch("
+      const response = await fetch("http://0.0.0.0:7860/uploadpdf", {
        method: "POST",
        body: formData,
      });
front/src/VerificationScene.tsx
CHANGED
@@ -35,16 +35,17 @@ export default function VerificationScene() {
  };

  const sendImageToServer = async (
-
+    idCard: string,
+    profileImage: string,
    setLoading: (n: boolean) => void,
  ) => {
    try {
-      const response = await fetch("http://0.0.0.0:7860/
+      const response = await fetch("http://0.0.0.0:7860/uploadids", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
-        body: JSON.stringify({
+        body: JSON.stringify({ idCard: idCard, profileImage: profileImage }),
      });

      if (!response.ok) {
@@ -145,6 +146,7 @@ export default function VerificationScene() {
    <ResultContainer
      sendImageToServer={sendImageToServer}
      idCardPicture={idCardPicture ?? ""}
+      profileImage={profilePicture ?? ""}
    />
  );
}
front/src/components/ResultContainer.tsx
CHANGED
@@ -3,14 +3,20 @@ import { useEffect, useState } from "react";
export default function ResultContainer({
  sendImageToServer,
  idCardPicture,
+  profileImage,
}: {
-  sendImageToServer: (
+  sendImageToServer: (
+    pic: string,
+    pic2: string,
+    setLoading: (n: boolean) => void,
+  ) => void;
  idCardPicture: string;
+  profileImage: string;
}) {
  const [loading, setLoading] = useState<boolean>(true);

  useEffect(() => {
-    sendImageToServer(idCardPicture ?? "", setLoading);
+    sendImageToServer(idCardPicture ?? "", profileImage ?? "", setLoading);
  }, []);
  return <>{loading ? <>Fetching</> : <>Perfect</>}</>;
}
main.py
CHANGED
@@ -17,6 +17,8 @@ app.add_middleware(
    allow_headers=["*"],
)

+pdf = 0
+
class ImageData(BaseModel):
    image: str

@@ -35,16 +37,16 @@ async def verif() -> FileResponse:
async def upload_pdf(data: ImageData):
    header, encoded = data.image.split(',', 1)
    binary_data = base64.b64decode(encoded)
-
-
-
+    # Call
+    pdf = binary_data;
+    time.sleep(10)
    return {"message": "Image reçue et sauvegardée"}

@router.post("/uploadids")
async def upload_ids(data: ImagesData):
+    # Call To The model
    return {"message": "Images reçues et sauvegardées"}

-
app.include_router(router)

app.mount("/", StaticFiles(directory="front/dist", html=True), name="static")
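Note: outside the React frontend, the /uploadids route changed above can be exercised with a short script. The host, port, endpoint, and JSON field names below come from this diff; the file names are placeholders.

# Hypothetical usage sketch for the /uploadids route in main.py.
import base64
import requests

def to_data_url(path: str) -> str:
    # Build a "data:image/png;base64,..." string, mirroring what the frontend sends
    # and what the server splits with split(',', 1).
    with open(path, "rb") as f:
        return "data:image/png;base64," + base64.b64encode(f.read()).decode()

payload = {
    "idCard": to_data_url("id_card.png"),       # placeholder file names
    "profileImage": to_data_url("profile.png"),
}
response = requests.post("http://0.0.0.0:7860/uploadids", json=payload)
print(response.json())  # expected: {"message": "Images reçues et sauvegardées"}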