Upload handler.py with huggingface_hub
handler.py  +7 -7

handler.py  CHANGED
@@ -1,14 +1,12 @@
-from transformers import pipeline, AutoModelForTokenClassification, AutoTokenizer
 from typing import Dict, List, Any
-from
+from transformers import AutoModel, AutoTokenizer
 
 
 class EndpointHandler:
     def __init__(self, path="."):
-
-
-        self.
-        self.pipeline.tokenizer.backend_tokenizer.decoder = WordPiece()
+        self.tokenizer = AutoTokenizer.from_pretrained(path)
+        self.model = AutoModel.from_pretrained(path, trust_remote_code=True)
+        self.model.eval()
 
     def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
         """
@@ -18,4 +16,6 @@ class EndpointHandler:
         Return:
             A :obj:`list` | `dict`: will be serialized and returned
         """
-        return self.pipeline(data['inputs'])
+        # return self.pipeline(data['inputs'])
+        return self.model.predict([data['inputs']], self.tokenizer, output_style='json')
+
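Several of the removed lines are truncated in this diff view (the second import and most of the old __init__ body). Judging from the surviving fragments — the pipeline / AutoModelForTokenClassification import, the WordPiece() decoder override, and the old return self.pipeline(data['inputs']) — the previous handler presumably followed the stock token-classification handler pattern. A hedged sketch of what it likely looked like; the task string, keyword arguments, and WordPiece import path are assumptions, not recovered code:

# Hypothetical reconstruction of the removed handler; the truncated lines
# are filled in with the usual token-classification pattern, not the
# actual deleted code.
from typing import Dict, List, Any
from transformers import pipeline, AutoModelForTokenClassification, AutoTokenizer
from tokenizers.decoders import WordPiece  # assumed source of WordPiece


class EndpointHandler:
    def __init__(self, path="."):
        tokenizer = AutoTokenizer.from_pretrained(path)
        model = AutoModelForTokenClassification.from_pretrained(path)
        self.pipeline = pipeline("token-classification", model=model, tokenizer=tokenizer)
        # Decoder override kept verbatim from the diff: use a WordPiece decoder
        # so subword tokens are joined back into clean words in the output.
        self.pipeline.tokenizer.backend_tokenizer.decoder = WordPiece()

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        return self.pipeline(data['inputs'])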
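The new handler no longer goes through a transformers pipeline: it loads the model with trust_remote_code=True and delegates to a predict(...) method supplied by the model's custom code. A minimal local smoke test, assuming the repository at path="." ships such remote code; the example input string is illustrative only:

# Local sanity check of the updated handler, outside Inference Endpoints.
# Assumes "." is a model repo whose trust_remote_code class implements
# predict(sentences, tokenizer, output_style=...), as called in handler.py.
from handler import EndpointHandler

handler = EndpointHandler(path=".")

payload = {"inputs": "Barack Obama visited Paris last week."}
result = handler(payload)  # list/dict that Inference Endpoints will serialize
print(result)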