WXM2000 committed on
Commit
33fd717
1 Parent(s): 0562e00

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -1,9 +1,9 @@
1
  from transformers import AutoModelForTokenClassification,AutoTokenizer,pipeline
2
  import gradio as gr
3
  import torch
4
- model = AutoModelForTokenClassification.from_pretrained('uer/roberta-base-finetuned-cluener2020-chinese',local_files_only=True)#cache_dir="C:\2023\Huggingface_4_12\Gradio Tutorial",force_download=True)
5
  # model = AutoModelForTokenClassification.from_pretrained('C:\\2023\Huggingface_4_12\Gradio Tutorial\cache3\Huggingface_4_12\Gradio Tutorial\models--uer--roberta-base-finetuned-cluener2020-chinese\blobs\3d20fdef0b0f04d283e1693ef4c030b133fa7c3c')
6
- tokenizer = AutoTokenizer.from_pretrained('uer/roberta-base-finetuned-cluener2020-chinese',local_files_only=True)
7
 
8
 
9
  ner_pipeline = pipeline('ner', model=model, tokenizer=tokenizer)
 
1
  from transformers import AutoModelForTokenClassification,AutoTokenizer,pipeline
2
  import gradio as gr
3
  import torch
4
+ model = AutoModelForTokenClassification.from_pretrained('uer/roberta-base-finetuned-cluener2020-chinese')#,local_files_only=True)#cache_dir="C:\2023\Huggingface_4_12\Gradio Tutorial",force_download=True)
5
  # model = AutoModelForTokenClassification.from_pretrained('C:\\2023\Huggingface_4_12\Gradio Tutorial\cache3\Huggingface_4_12\Gradio Tutorial\models--uer--roberta-base-finetuned-cluener2020-chinese\blobs\3d20fdef0b0f04d283e1693ef4c030b133fa7c3c')
6
+ tokenizer = AutoTokenizer.from_pretrained('uer/roberta-base-finetuned-cluener2020-chinese')#,local_files_only=True)
7
 
8
 
9
  ner_pipeline = pipeline('ner', model=model, tokenizer=tokenizer)