Hiveurban committed
Commit fe2638e · verified · 1 Parent(s): a76b97c

Upload handler.py with huggingface_hub

Files changed (1):
  handler.py +15 -15
handler.py CHANGED
@@ -22,19 +22,19 @@ class EndpointHandler:
         """
         # return self.pipeline(data['inputs'])
         outputs = self.model.predict(data['inputs'], self.tokenizer, output_style='json')
-        for i, output in enumerate(outputs):
-            lem = ' '.join([x['lex'] for x in output['tokens']])
-            ner = [
-                {
-                    'word': ' '.join([x['lex'] for x in output['tokens'][x['token_start']:x['token_end'] + 1]]),
-                    'entity_group': x['label'],
-                    'token_start': x['token_start'],
-                    'token_end': x['token_end']
-                }
-                for x in output['ner_entities']
-            ]
-            outputs[i] = {
-                'lex': lem,
-                'ner': ner
-            }
+        # for i, output in enumerate(outputs):
+        #     lem = ' '.join([x['lex'] for x in output['tokens']])
+        #     ner = [
+        #         {
+        #             'word': ' '.join([x['lex'] for x in output['tokens'][x['token_start']:x['token_end'] + 1]]),
+        #             'entity_group': x['label'],
+        #             'token_start': x['token_start'],
+        #             'token_end': x['token_end']
+        #         }
+        #         for x in output['ner_entities']
+        #     ]
+        #     outputs[i] = {
+        #         'lex': lem,
+        #         'ner': ner
+        #     }
         return outputs
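
For context, here is a minimal sketch of how a custom EndpointHandler like this one is usually structured for Hugging Face Inference Endpoints. Only the model.predict(...) call, the output_style='json' argument, and the data['inputs'] access come from this file; the __init__ body, the AutoModel/AutoTokenizer loading, and the trust_remote_code flag are assumptions for illustration, not part of this commit:

    from transformers import AutoModel, AutoTokenizer

    class EndpointHandler:
        def __init__(self, path: str = ""):
            # 'path' is the local path of the model repository on the endpoint.
            # Assumption: the model ships custom modeling code that exposes a
            # .predict(...) method, hence trust_remote_code=True.
            self.tokenizer = AutoTokenizer.from_pretrained(path)
            self.model = AutoModel.from_pretrained(path, trust_remote_code=True)
            self.model.eval()

        def __call__(self, data: dict) -> list:
            # Inference Endpoints pass the parsed JSON request body as `data`,
            # so data['inputs'] holds the text (or list of texts) to analyze.
            # After this commit the model's raw JSON output is returned as-is,
            # without the earlier lex/NER post-processing.
            outputs = self.model.predict(data['inputs'], self.tokenizer, output_style='json')
            return outputs

A request body such as {"inputs": "some text"} would then be routed to __call__ by the endpoint's serving toolkit, and the JSON response is whatever predict returns.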