Upload README.md
README.md CHANGED
@@ -34,12 +34,15 @@ This model can be utilized for various NLP tasks such as text generation, summar
 
 import os
 import requests
+import shutil
 from transformers import GPT2LMHeadModel
 from cryptography.fernet import Fernet
+
 os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
 
-# Download Serbian-GPT-2 model
 print("\nDownload Serbian-GPT-2 model...")
+
+# Download Serbian-GPT-2 model
 model_name = 'edukom/Serbian-GPT-2'
 base_url = f'https://huggingface.co/{model_name}/resolve/main/'
 files_to_download = ['added_tokens.json', 'config.json', 'generation_config.json', 'merges.txt', 'pytorch_model.bin', 'special_tokens_map.json', 'tokenizer.json', 'tokenizer_config.json', 'vocab.json']
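This hunk only defines `base_url` and `files_to_download`; the README's actual download loop falls between the two hunks and is not shown in this diff. A minimal sketch of how those values could be used with `requests`, assuming a local `cache_dir` directory (the directory name below is hypothetical):

```python
# Hypothetical sketch only: the README's real download loop is not part of this
# diff. Streams each file from base_url into a local cache_dir using requests.
import os
import requests

model_name = 'edukom/Serbian-GPT-2'
base_url = f'https://huggingface.co/{model_name}/resolve/main/'
files_to_download = ['added_tokens.json', 'config.json', 'generation_config.json',
                     'merges.txt', 'pytorch_model.bin', 'special_tokens_map.json',
                     'tokenizer.json', 'tokenizer_config.json', 'vocab.json']
cache_dir = 'Serbian-GPT-2'  # assumed name; the README defines its own cache_dir

os.makedirs(cache_dir, exist_ok=True)
for filename in files_to_download:
    # Stream each file to disk so pytorch_model.bin does not have to fit in memory.
    response = requests.get(base_url + filename, stream=True)
    response.raise_for_status()
    with open(os.path.join(cache_dir, filename), 'wb') as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
```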
@@ -66,8 +69,18 @@ This model can be utilized for various NLP tasks such as text generation, summar
 with open(decryption_data, 'wb') as file:
     file.write(decrypted_data)
 
+source_path = os.path.join(cache_dir, 'pytorch_model.bin')
+
+destination_dir = os.path.join(cache_dir, 'models--edukom--Serbian-GPT-2', 'snapshots', '33f6d75d2eaa3479a83a7d5c2bdb29cebff58a4d')
+os.makedirs(destination_dir, exist_ok=True)
+
+destination_path = os.path.join(destination_dir, 'pytorch_model.bin')
+
+shutil.copyfile(source_path, destination_path)
+
 # Loading Serbian-GPT-2 model
-model = GPT2LMHeadModel.from_pretrained(cache_dir)
+model = GPT2LMHeadModel.from_pretrained(model_name, cache_dir=cache_dir)
+
 print("\nCongratulations, the Serbian-GPT-2 model is ready for use ヅ\n")
 
 except Exception as e:
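After this change, the decrypted `pytorch_model.bin` is copied into the Hugging Face cache layout and the model is loaded by repo name with `cache_dir` instead of directly from the flat download directory. A minimal usage sketch, assuming the README's download and decryption steps have already run; the tokenizer class, prompt, and generation settings below are assumptions and not part of the commit:

```python
# Minimal usage sketch (assumption, not part of this commit): load the model the
# way the updated README does, add a tokenizer, and generate a short continuation.
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer

model_name = 'edukom/Serbian-GPT-2'
cache_dir = 'Serbian-GPT-2'  # assumed; use the cache_dir defined in the README

# The README only loads the model; loading the tokenizer this way is an assumption.
tokenizer = GPT2Tokenizer.from_pretrained(model_name, cache_dir=cache_dir)
model = GPT2LMHeadModel.from_pretrained(model_name, cache_dir=cache_dir)
model.eval()

prompt = "Beograd je"  # example Serbian prompt
inputs = tokenizer(prompt, return_tensors='pt')
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=40, do_sample=True, top_p=0.95,
                            pad_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```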