Hiveurban committed
Commit a774a0e · verified · 1 Parent(s): fe2638e

Upload handler.py with huggingface_hub

Files changed (1): handler.py (+16, -16)
handler.py CHANGED
@@ -8,7 +8,7 @@ class EndpointHandler:
         self.model = AutoModel.from_pretrained(
             path,
             trust_remote_code=True,
-            # do_syntax=True, do_prefix=False, do_morph=False, do_ner=True, do_lex=True
+            do_syntax=True, do_prefix=False, do_morph=False, do_ner=True, do_lex=True
         )
         self.model.eval()
 
@@ -22,19 +22,19 @@ class EndpointHandler:
         """
         # return self.pipeline(data['inputs'])
         outputs = self.model.predict(data['inputs'], self.tokenizer, output_style='json')
-        # for i, output in enumerate(outputs):
-        #     lem = ' '.join([x['lex'] for x in output['tokens']])
-        #     ner = [
-        #         {
-        #             'word': ' '.join([x['lex'] for x in output['tokens'][x['token_start']:x['token_end'] + 1]]),
-        #             'entity_group': x['label'],
-        #             'token_start': x['token_start'],
-        #             'token_end': x['token_end']
-        #         }
-        #         for x in output['ner_entities']
-        #     ]
-        #     outputs[i] = {
-        #         'lex': lem,
-        #         'ner': ner
-        #     }
+        for i, output in enumerate(outputs):
+            lem = ' '.join([x['lex'] for x in output['tokens']])
+            ner = [
+                {
+                    'word': ' '.join([x['lex'] for x in output['tokens'][x['token_start']:x['token_end'] + 1]]),
+                    'entity_group': x['label'],
+                    'token_start': x['token_start'],
+                    'token_end': x['token_end']
+                }
+                for x in output['ner_entities']
+            ]
+            outputs[i] = {
+                'lex': lem,
+                'ner': ner
+            }
         return outputs
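
For context, a custom Inference Endpoints handler like this is normally exercised by constructing it with the model path and calling it with a {"inputs": ...} payload. The snippet below is a minimal local smoke test, not part of the commit: the __init__(path) and __call__(data) signatures follow the usual Hugging Face custom-handler convention (the diff only shows their bodies), and the checkout path and input sentence are placeholders.

    # Hypothetical local test of the handler changed in this commit.
    # Assumes handler.py sits in the current directory alongside the model checkout.
    from handler import EndpointHandler

    handler = EndpointHandler(path=".")  # path to the downloaded model repo (assumption)

    # Inference Endpoints deliver the request body as a dict with an "inputs" key,
    # which is what the handler reads via data['inputs'].
    result = handler({"inputs": "Example sentence to analyze."})

    # After this commit, each output item is rewritten to {'lex': ..., 'ner': [...]},
    # so the joined lexemes and grouped entities can be read directly.
    for item in result:
        print(item["lex"])
        for ent in item["ner"]:
            print(ent["entity_group"], ent["word"], ent["token_start"], ent["token_end"])

With do_ner and do_lex enabled in from_pretrained and the post-processing loop uncommented, the endpoint now returns the reduced lex/ner structure instead of the raw predict() JSON.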