"""Bootstrap module: load the COSMIC and DeBERTa models.

Module-level side effects on import:
- downloads NLTK resources (punkt, vader_lexicon) if missing,
- extends sys.path so the COSMIC/DeBERTa subpackages are importable,
- loads three models into module-level names: ``roberta`` (feature
  extractor), ``COSMIC_MODEL`` (ERC classifier), ``deberta_model``.
"""
import sys

import nltk
from fairseq.models.roberta import RobertaModel

# Conditionally fetch NLTK data only when it is not already installed.
try:
    nltk.data.find("tokenizers/punkt")
except LookupError:
    nltk.download("punkt")
try:
    nltk.data.find("sentiment/vader_lexicon.zip")
except LookupError:
    nltk.download("vader_lexicon")

PATH_TO_COSMIC = "./Model/COSMIC"
EXTRACTORS_PATH = PATH_TO_COSMIC + "/feature_extraction"
EPIK_MODEL_DIR = PATH_TO_COSMIC + "/erc_training"

# The COSMIC subpackages expect their own directories on sys.path, so the
# path mutation must happen before the imports below.
sys.path.append(PATH_TO_COSMIC)
sys.path.append(EXTRACTORS_PATH)
sys.path.append(EPIK_MODEL_DIR)
sys.path.append(".")

from Model.COSMIC.feature_extraction.comet.csk_feature_extract import (
    CSKFeatureExtractor,
)
# Aliased: the DeBERTa import further down also exports a `load_model`,
# which would otherwise shadow this one.
from Model.COSMIC.erc_training.predict_epik import (
    parse_cosmic_args,
    load_model as load_cosmic_model,
)

# RoBERTa checkpoint used as the sentence-feature extractor for COSMIC.
roberta = RobertaModel.from_pretrained(
    EXTRACTORS_PATH + "/checkpoints/epik/",
    checkpoint_file="checkpoint_best.pt",
    data_name_or_path="../../epik-bin",
)
roberta.eval()

# device=0 runs on GPU; device="cpu" runs on CPU.
comet = CSKFeatureExtractor(dir=EXTRACTORS_PATH, device="cpu")

cosmic_args = parse_cosmic_args()
COSMIC_MODEL = load_cosmic_model(EPIK_MODEL_DIR + "/epik/best_model.pt", cosmic_args)

PATH_TO_DEBERTA = "./Model/DeBERTa"
sys.path.append(PATH_TO_DEBERTA)

# NOTE: this rebinds `load_model` at module level to the DeBERTa loader,
# matching the original script's final binding.
from Model.DeBERTa.deberta import load_model, deberta_init

cfg, tokenizer = deberta_init()
deberta_model = load_model(cfg, PATH_TO_DEBERTA)