Ahmed BALDE committed on
Commit
6d612ea
·
1 Parent(s): 6600415

Create handler

Browse files
Files changed (3) hide show
  1. .handler.py.~undo-tree~ +83 -0
  2. handler.py +46 -0
  3. handler.py~ +39 -0
.handler.py.~undo-tree~ ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ (undo-tree-save-format-version . 1)
2
+ "1eaf73aaceb14fde3836d9d11469cf9f74372879"
3
+ [nil nil nil nil (25763 41835 232785 506000) 0 nil]
4
+ ([nil nil ((nil rear-nonsticky nil 508 . 509) (nil fontified nil 1 . 509) (1 . 509) (t . -1)) nil (25763 41835 232784 227000) 0 nil])
5
+ ([nil nil ((511 . 519) (#(" " 0 8 (fontified nil)) . 510) (undo-tree-id14 . -8) (509 . 519)) nil (25763 41835 232782 980000) 0 nil])
6
+ ([nil nil ((nil rear-nonsticky nil 1405 . 1406) (nil fontified nil 519 . 1406) (519 . 1406)) nil (25763 41835 232781 421000) 0 nil])
7
+ ([nil nil ((#(" " 0 4 (fontified t)) . -658) 665) nil (25763 41835 232780 270000) 0 nil])
8
+ ([nil nil ((#(" import torch.cuda
9
+ device = \"cuda\" if torch.cuda.is_available() else \"cpu\"
10
+ " 0 8 (fontified t) 8 14 (fontified t face font-lock-keyword-face) 14 26 (fontified t) 26 32 (fontified t face font-lock-variable-name-face) 32 35 (fontified t) 35 41 (fontified t face font-lock-string-face) 41 42 (fontified t) 42 44 (fontified t face font-lock-keyword-face) 44 71 (fontified t) 71 75 (fontified t face font-lock-keyword-face) 75 76 (fontified t) 76 81 (fontified t face font-lock-string-face) 81 82 (fontified t)) . 511) (undo-tree-id11 . -82) (undo-tree-id12 . -8) (undo-tree-id13 . -82) 593) nil (25763 41835 232779 341000) 0 nil])
11
+ ([nil nil ((#("
12
+ " 0 1 (fontified t)) . -510) (undo-tree-id8 . -1) (undo-tree-id9 . -1) (undo-tree-id10 . -1) 511) nil (25763 41835 232776 666000) 0 nil])
13
+ ([nil nil ((nil rear-nonsticky nil 182 . 183) (nil fontified nil 182 . 183) (nil fontified nil 177 . 182) (nil fontified nil 176 . 177) (nil fontified nil 172 . 176) (nil fontified nil 145 . 172) (nil fontified nil 143 . 145) (nil fontified nil 142 . 143) (nil fontified nil 136 . 142) (nil fontified nil 133 . 136) (nil fontified nil 127 . 133) (nil fontified nil 115 . 127) (nil fontified nil 109 . 115) (nil fontified nil 101 . 109) (101 . 183)) nil (25763 41835 232773 592000) 0 nil])
14
+ ([nil nil ((#(" " 0 8 (fontified t)) . -101)) nil (25763 41835 232770 741000) 0 nil])
15
+ ([nil nil ((585 . 593)) nil (25763 41835 232770 40000) 0 nil])
16
+ ([nil nil ((#(" " 0 4 (fontified t)) . -589) (undo-tree-id7 . -4) 593) nil (25763 41835 232768 935000) 0 nil])
17
+ ([nil nil ((649 . 657) (#(" " 0 4 (fontified t)) . 649) (undo-tree-id6 . -4) 653) nil (25763 41835 232767 117000) 0 nil])
18
+ ([nil nil ((1401 . 1402) 657) nil (25763 41835 232764 760000) 0 nil])
19
+ ([nil nil ((1 . 119) (#("from transformers import AutoModelForCausalLM, AutoTokenizer
20
+ from peft import PeftConfig, PeftModel
21
+ import torch.cuda" 0 4 (fontified nil face font-lock-keyword-face) 4 18 (fontified nil) 18 24 (fontified nil face font-lock-keyword-face) 24 61 (fontified nil) 61 65 (fontified nil face font-lock-keyword-face) 65 71 (fontified nil) 71 77 (fontified nil face font-lock-keyword-face) 77 100 (fontified nil) 100 106 (fontified nil face font-lock-keyword-face) 106 117 (fontified nil)) . 1) (undo-tree-id0 . -100) (undo-tree-id1 . -61) (undo-tree-id2 . -61) (undo-tree-id3 . -100) (undo-tree-id4 . -100) (176 . 177) (337 . 350) (462 . 475) (#(" " 0 1 (fontified t face font-lock-doc-face)) . 732) (undo-tree-id5 . -1) (1025 . 1038) 657) nil (25763 41835 232760 138000) 0 nil])
22
+ ([nil nil ((119 . 140) (t 25763 41835 258427 950000)) nil (25763 41879 569745 805000) 0 nil])
23
+ ([nil nil ((140 . 147)) nil (25763 41879 569744 452000) 0 nil])
24
+ ([nil nil ((1 . 149) (#("import torch.cuda
25
+ from peft import PeftConfig, PeftModel
26
+ from transformers import AutoModelForCausalLM, AutoTokenizer
27
+ from typing import Dict, Any" 0 6 (fontified t face font-lock-keyword-face) 6 18 (fontified t) 18 22 (fontified t face font-lock-keyword-face) 22 28 (fontified t) 28 34 (fontified t face font-lock-keyword-face) 34 57 (fontified t) 57 61 (fontified t face font-lock-keyword-face) 61 75 (fontified t) 75 81 (fontified t face font-lock-keyword-face) 81 118 (fontified t) 118 122 (fontified t face font-lock-keyword-face) 122 130 (fontified t) 130 136 (fontified t face font-lock-keyword-face) 136 146 (fontified t)) . -1) (undo-tree-id15 . -146) (undo-tree-id16 . -118) (undo-tree-id17 . -118) (undo-tree-id18 . -57) (undo-tree-id19 . -57) (undo-tree-id20 . -18) (undo-tree-id21 . -18) (undo-tree-id22 . -103) (undo-tree-id23 . -118) (undo-tree-id24 . -118) (undo-tree-id25 . -146) 147) nil (25763 41879 569740 198000) 0 nil])
28
+ ([nil nil ((551 . 560) (t 25763 41879 605715 615000)) nil (25763 41942 449761 53000) 0 nil])
29
+ ([nil nil ((1 . 2)) nil (25763 41942 449760 243000) 0 nil])
30
+ ([nil nil ((nil rear-nonsticky nil 1658 . 1659) (nil fontified nil 1 . 1659) (1 . 1659)) nil (25763 41942 449759 473000) 0 nil])
31
+ ([nil nil ((#("from typing import Any, Dict
32
+
33
+ import torch.cuda
34
+ from peft import PeftConfig, PeftModel
35
+ from transformers import AutoModelForCausalLM, AutoTokenizer
36
+
37
+ device = \"cuda\" if torch.cuda.is_available() else \"cpu\"
38
+
39
+
40
+ class EndpointHandler():
41
+ def __init__(self, path=\"\"):
42
+ config = PeftConfig.from_pretrained(path)
43
+ model = AutoModelForCausalLM.from_pretrained(
44
+ config.base_model_name_or_path, load_in_8bit=True, device_map='auto')
45
+ self.tokenizer = AutoTokenizer.from_pretrained(
46
+ config.base_model_name_or_path)
47
+
48
+ # Load the Lora model
49
+ self.model = PeftModel.from_pretrained(model, path)
50
+
51
+ def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
52
+ \"\"\"
53
+ Args:
54
+ data (Dict): The payload with the text prompt
55
+ and generation parameters.
56
+ \"\"\"
57
+ # Get inputs
58
+ prompt = data.pop(\"inputs\", None)
59
+ parameters = data.pop(\"parameters\", None)
60
+ if prompt is None:
61
+ raise ValueError(\"Missing prompt.\")
62
+ # Preprocess
63
+ input_ids = self.tokenizer(
64
+ prompt, return_tensors=\"pt\").input_ids.to(device)
65
+ # Forward
66
+ if parameters is not None:
67
+ output = self.model.generate(input_ids=input_ids, **parameters)
68
+ else:
69
+ output = self.model.generate(input_ids=input_ids)
70
+ # Postprocess
71
+ prediction = self.tokenizer.decode(output[0])
72
+ return {\"generated_text\": prediction}
73
+ " 0 4 (fontified t face font-lock-keyword-face) 4 12 (fontified t) 12 18 (fontified t face font-lock-keyword-face) 18 29 (fontified t) 29 30 (fontified t) 30 36 (fontified t face font-lock-keyword-face) 36 48 (fontified t) 48 52 (fontified t face font-lock-keyword-face) 52 58 (fontified t) 58 64 (fontified t face font-lock-keyword-face) 64 87 (fontified t) 87 91 (fontified t face font-lock-keyword-face) 91 105 (fontified t) 105 111 (fontified t face font-lock-keyword-face) 111 148 (fontified t) 148 149 (fontified t) 149 155 (fontified t face font-lock-variable-name-face) 155 158 (fontified t) 158 164 (fontified t face font-lock-string-face) 164 165 (fontified t) 165 167 (fontified t face font-lock-keyword-face) 167 194 (fontified t) 194 198 (fontified t face font-lock-keyword-face) 198 199 (fontified t) 199 204 (fontified t face font-lock-string-face) 204 205 (rear-nonsticky t fontified t) 205 206 (fontified t) 206 207 (fontified t) 207 212 (fontified t face font-lock-keyword-face) 212 213 (fontified t) 213 228 (fontified t face font-lock-type-face) 228 236 (fontified t) 236 239 (fontified t face font-lock-keyword-face) 239 240 (fontified t) 240 248 (fontified t face font-lock-function-name-face) 248 249 (fontified t) 249 253 (fontified t face font-lock-keyword-face) 253 260 (fontified t) 260 262 (fontified t face font-lock-string-face) 262 273 (fontified t) 273 279 (fontified t face font-lock-variable-name-face) 279 323 (fontified t) 323 328 (fontified t face font-lock-variable-name-face) 328 368 (fontified t) 368 381 (fontified t) 381 426 (fontified t) 426 430 (fontified t face font-lock-constant-face) 430 443 (fontified t) 443 449 (fontified t face font-lock-string-face) 449 459 (fontified t) 459 463 (fontified t face font-lock-keyword-face) 463 500 (fontified t) 500 506 (fontified t) 506 507 (fontified t) 507 519 (fontified t) 519 529 (fontified t) 529 530 (fontified t) 530 551 (fontified t) 551 560 (fontified t) 560 568 (fontified t) 568 570 (fontified t 
face font-lock-comment-delimiter-face) 570 590 (fontified t face font-lock-comment-face) 590 598 (fontified t) 598 602 (fontified t face font-lock-keyword-face) 602 641 (fontified t) 641 648 (fontified t) 648 649 (rear-nonsticky t fontified t) 649 650 (fontified t) 650 651 (fontified t) 651 655 (fontified t) 655 658 (fontified t face font-lock-keyword-face) 658 659 (fontified t) 659 667 (fontified t face font-lock-function-name-face) 667 668 (fontified t) 668 672 (fontified t face font-lock-keyword-face) 672 681 (fontified t) 681 685 (fontified t) 685 688 (fontified t face font-lock-builtin-face) 688 704 (fontified t) 704 707 (fontified t face font-lock-builtin-face) 707 715 (fontified t) 715 723 (fontified t) 723 724 (syntax-table (15) fontified t face font-lock-doc-face) 724 727 (fontified t face font-lock-doc-face) 727 750 (fontified t face font-lock-doc-face) 750 799 (fontified t face font-lock-doc-face) 799 844 (fontified t face font-lock-doc-face) 844 845 (syntax-table (15) fontified t face font-lock-doc-face) 845 854 (fontified t) 854 856 (fontified t face font-lock-comment-delimiter-face) 856 867 (fontified t face font-lock-comment-face) 867 875 (fontified t) 875 881 (fontified t face font-lock-variable-name-face) 881 893 (fontified t) 893 901 (fontified t face font-lock-string-face) 901 903 (fontified t) 903 907 (fontified t face font-lock-constant-face) 907 917 (fontified t) 917 927 (fontified t face font-lock-variable-name-face) 927 939 (fontified t) 939 951 (fontified t face font-lock-string-face) 951 953 (fontified t) 953 957 (fontified t face font-lock-constant-face) 957 967 (fontified t) 967 969 (fontified t face font-lock-keyword-face) 969 977 (fontified t) 977 979 (fontified t face font-lock-keyword-face) 979 980 (fontified t) 980 984 (fontified t face font-lock-constant-face) 984 998 (fontified t) 998 1003 (fontified t face font-lock-keyword-face) 1003 1004 (fontified t) 1004 1007 (fontified t face font-lock-type-face) 1007 1014 (face 
font-lock-type-face fontified t) 1014 1015 (fontified t) 1015 1032 (face font-lock-string-face fontified t) 1032 1034 (fontified t) 1034 1042 (fontified t) 1042 1044 (fontified t face font-lock-comment-delimiter-face) 1044 1051 (fontified t face font-lock-comment-face) 1051 1055 (fontified t face font-lock-comment-face) 1055 1060 (fontified t) 1060 1063 (fontified t) 1063 1071 (fontified t face font-lock-variable-name-face) 1071 1072 (fontified t face font-lock-variable-name-face) 1072 1075 (fontified t) 1075 1079 (fontified t face font-lock-keyword-face) 1079 1090 (fontified t) 1090 1091 (fontified t) 1091 1103 (fontified t) 1103 1126 (fontified t) 1126 1130 (fontified t face font-lock-string-face) 1130 1153 (fontified t) 1153 1161 (fontified t) 1161 1163 (fontified t face font-lock-comment-delimiter-face) 1163 1171 (fontified t face font-lock-comment-face) 1171 1179 (fontified t) 1179 1181 (fontified t face font-lock-keyword-face) 1181 1193 (fontified t) 1193 1195 (fontified t face font-lock-keyword-face) 1195 1196 (fontified t) 1196 1199 (fontified t face font-lock-keyword-face) 1199 1200 (fontified t) 1200 1204 (fontified t face font-lock-constant-face) 1204 1215 (fontified t) 1215 1218 (fontified t) 1218 1224 (fontified t face font-lock-variable-name-face) 1224 1227 (fontified t) 1227 1231 (fontified t face font-lock-keyword-face) 1231 1239 (fontified t) 1239 1282 (fontified t) 1282 1290 (fontified t) 1290 1294 (fontified t face font-lock-keyword-face) 1294 1308 (fontified t) 1308 1312 (fontified t face font-lock-variable-name-face) 1312 1314 (fontified t face font-lock-variable-name-face) 1314 1317 (fontified t) 1317 1321 (fontified t face font-lock-keyword-face) 1321 1358 (fontified t) 1358 1366 (fontified t) 1366 1368 (fontified t face font-lock-comment-delimiter-face) 1368 1380 (fontified t face font-lock-comment-face) 1380 1388 (fontified t) 1388 1398 (fontified t face font-lock-variable-name-face) 1398 1401 (fontified t) 1401 1405 (fontified t face 
font-lock-keyword-face) 1405 1434 (fontified t) 1434 1442 (fontified t) 1442 1448 (fontified t face font-lock-keyword-face) 1448 1450 (fontified t) 1450 1466 (fontified t face font-lock-string-face) 1466 1478 (fontified t) 1478 1479 (rear-nonsticky t fontified t) 1479 1480 (fontified t)) . -1660) (undo-tree-id32 . -649) (undo-tree-id33 . -1480) (undo-tree-id34 . -149) (undo-tree-id35 . -650) (undo-tree-id36 . -650) (undo-tree-id37 . -206) (undo-tree-id38 . -1480) 3140) nil (25763 41942 449757 855000) 0 nil])
74
+ ([nil nil ((#("
75
+ " 0 1 (fontified t)) . -1659) (undo-tree-id26 . -1) (undo-tree-id27 . -1) (undo-tree-id28 . -1) (undo-tree-id29 . -1) (undo-tree-id30 . -1) (undo-tree-id31 . -1) 1660) nil (25763 41942 449751 578000) 0 nil])
76
+ ([nil current ((1 . 163) (#("from typing import Dict, Any
77
+ import logging
78
+
79
+ from transformers import AutoModelForCausalLM, AutoTokenizer
80
+ from peft import PeftConfig, PeftModel
81
+ import torch.cuda
82
+ " 0 4 (fontified nil face font-lock-keyword-face) 4 12 (fontified nil) 12 18 (fontified nil face font-lock-keyword-face) 18 29 (fontified nil) 29 35 (fontified nil face font-lock-keyword-face) 35 45 (fontified nil) 45 49 (fontified nil face font-lock-keyword-face) 49 63 (fontified nil) 63 69 (fontified nil face font-lock-keyword-face) 69 106 (fontified nil) 106 110 (fontified nil face font-lock-keyword-face) 110 116 (fontified nil) 116 122 (fontified nil face font-lock-keyword-face) 122 145 (fontified nil) 145 151 (fontified nil face font-lock-keyword-face) 151 163 (fontified nil)) . 1) (462 . 475) (587 . 600) (1187 . 1200) 1659) nil (25763 41942 449734 596000) 0 nil])
83
+ nil
handler.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from typing import Any, Dict
3
+
4
+ import torch.cuda
5
+ from peft import PeftConfig, PeftModel
6
+ from transformers import AutoModelForCausalLM, AutoTokenizer
7
+
8
# Configure root logging once at import time, then grab a module-scoped logger.
logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger(__name__)

# Prefer the GPU whenever CUDA is available; otherwise fall back to the CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
11
+
12
+
13
class EndpointHandler:
    """Inference endpoint serving a PEFT (LoRA) fine-tuned causal language model."""

    def __init__(self, path=""):
        """Load the PEFT config, the 8-bit base model, and its tokenizer from *path*."""
        config = PeftConfig.from_pretrained(path)
        model = AutoModelForCausalLM.from_pretrained(
            config.base_model_name_or_path, load_in_8bit=True, device_map='auto')
        self.tokenizer = AutoTokenizer.from_pretrained(
            config.base_model_name_or_path)
        # Load the Lora model
        self.model = PeftModel.from_pretrained(model, path)

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Args:
            data (Dict): The payload with the text prompt and generation parameters.

        Returns:
            Dict with a single "generated_text" key holding the decoded output.

        Raises:
            ValueError: If the payload carries no "inputs" entry.
        """
        # Lazy %-style args: the message is only formatted if INFO is enabled.
        LOGGER.info("Received data: %s", data)
        # Get inputs
        prompt = data.pop("inputs", None)
        parameters = data.pop("parameters", None)
        if prompt is None:
            raise ValueError("Missing prompt.")
        # Preprocess
        input_ids = self.tokenizer(
            prompt, return_tensors="pt").input_ids.to(device)
        # Forward
        LOGGER.info("Start generation.")  # was an f-string with no placeholders (F541)
        if parameters is not None:
            output = self.model.generate(input_ids=input_ids, **parameters)
        else:
            output = self.model.generate(input_ids=input_ids)
        # Postprocess
        prediction = self.tokenizer.decode(output[0])
        LOGGER.info("Generated text: %s", prediction)
        return {"generated_text": prediction}
handler.py~ ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from typing import Any, Dict

import torch.cuda
from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer
4
+
5
# Run on the GPU when CUDA is present; otherwise fall back to the CPU.
device = "cpu" if not torch.cuda.is_available() else "cuda"
6
+
7
+
8
class EndpointHandler():
    # Serves a LoRA/PEFT fine-tuned causal LM behind a simple callable interface.

    def __init__(self, path=""):
        # Resolve the adapter config first, then pull the 8-bit base model
        # and its tokenizer before attaching the LoRA weights.
        peft_config = PeftConfig.from_pretrained(path)
        base = AutoModelForCausalLM.from_pretrained(
            peft_config.base_model_name_or_path, load_in_8bit=True, device_map='auto')
        self.tokenizer = AutoTokenizer.from_pretrained(
            peft_config.base_model_name_or_path)
        # Load the Lora model
        self.model = PeftModel.from_pretrained(base, path)

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Run text generation for the prompt carried in *data*.

        Args:
            data (Dict): The payload with the text prompt
                and generation parameters.
        """
        prompt = data.pop("inputs", None)
        parameters = data.pop("parameters", None)
        if prompt is None:
            raise ValueError("Missing prompt.")
        # Tokenize the prompt and move the ids onto the target device.
        input_ids = self.tokenizer(
            prompt, return_tensors="pt").input_ids.to(device)
        # Generate, forwarding any caller-supplied sampling parameters.
        generate_kwargs = {} if parameters is None else parameters
        output = self.model.generate(input_ids=input_ids, **generate_kwargs)
        # Decode the first (and only) returned sequence.
        prediction = self.tokenizer.decode(output[0])
        return {"generated_text": prediction}