malmarjeh committed on
Commit
d2adea5
1 Parent(s): 66b5464

Create handler.py

Browse files
Files changed (1) hide show
  1. handler.py +36 -0
handler.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Dict, List
2
+
3
+ import torch
4
+ import transformers
5
+ from transformers import AutoModelForCausalLM, AutoTokenizer
6
+
7
# Pick the compute dtype once at import time.
# bfloat16 needs compute capability >= 8 (Ampere or newer — the original
# `== 8` wrongly excluded capability 9 / Hopper); fall back to float16
# otherwise. Guard with is_available() so importing this module on a
# CPU-only host does not raise (get_device_capability requires a CUDA device).
dtype = (
    torch.bfloat16
    if torch.cuda.is_available() and torch.cuda.get_device_capability()[0] >= 8
    else torch.float16
)
8
+
9
+
10
class EndpointHandler:
    """Inference Endpoints handler serving a causal-LM text-generation pipeline.

    Loads the tokenizer and model from ``path`` once at construction time,
    freezes a greedy-decoding generation config, and answers requests via
    ``__call__``.
    """

    def __init__(self, path: str = ""):
        """Load tokenizer and model from ``path`` and build the pipeline.

        Args:
            path: Model directory or hub repo id (Inference Endpoints passes
                the local snapshot path).
        """
        tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True)
        model = AutoModelForCausalLM.from_pretrained(
            path,
            return_dict=True,
            device_map="auto",
            torch_dtype=dtype,  # module-level dtype chosen from GPU capability
            trust_remote_code=True,
        )

        generation_config = model.generation_config
        generation_config.max_new_tokens = 2000
        # temperature=0 is only meaningful for greedy decoding; make that
        # explicit with do_sample=False so transformers does not warn about
        # an invalid sampling temperature.
        generation_config.temperature = 0
        generation_config.do_sample = False
        generation_config.num_return_sequences = 1
        # Use EOS as PAD so generation can pad without a dedicated pad token.
        generation_config.pad_token_id = tokenizer.eos_token_id
        generation_config.eos_token_id = tokenizer.eos_token_id
        self.generation_config = generation_config

        self.pipeline = transformers.pipeline(
            "text-generation", model=model, tokenizer=tokenizer
        )

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Generate text for a single request.

        Args:
            data: Request payload; the prompt is read from ``data["inputs"]``.
                If the key is absent the whole payload is passed through as
                the prompt (original behavior preserved).

        Returns:
            The pipeline output: a list of dicts, each containing
            ``"generated_text"`` (the original annotation said ``Dict`` but
            the text-generation pipeline returns a list).
        """
        # .get instead of .pop: same prompt selection, but the caller's
        # payload dict is no longer mutated.
        prompt = data.get("inputs", data)
        return self.pipeline(prompt, generation_config=self.generation_config)