Update app.py
app.py CHANGED
@@ -12,35 +12,67 @@ from huggingface_hub import hf_hub_download
 from llama_index.core.node_parser import SentenceSplitter
 
 # Function to reinstall llama-cpp-python with CUDA support
-def […]
+def check_and_install_cuda():
     try:
-        # Check whether Docker is available
-        if not shutil.which("docker"):
-            print("Docker not found. Make sure Docker is installed and running.")
-            return
-
         # Check whether the CUDA Toolkit is available
-        if […]
-            print("CUDA Toolkit […]
+        if shutil.which("nvcc"):
+            print("CUDA Toolkit found.")
+            return True
+        else:
+            print("CUDA Toolkit not found. Attempting to download and install it...")
+
+            # Detect the OS
+            os_name = platform.system().lower()
+            if "linux" in os_name:
+                # URL of the CUDA Toolkit 12.2.2 installer for Linux
+                cuda_url = "https://developer.download.nvidia.com/compute/cuda/12.2.2/local_installers/cuda_12.2.2_535.86.10_linux.run"
+                installer_name = "cuda_installer.run"
+
+                # Download the CUDA Toolkit
+                print(f"Downloading the CUDA Toolkit from {cuda_url}...")
+                subprocess.run(["wget", "-O", installer_name, cuda_url], check=True)
+
+                # Install the CUDA Toolkit
+                print("Installing the CUDA Toolkit...")
+                subprocess.run(["sudo", "sh", installer_name, "--silent", "--toolkit"], check=True)
+                print("CUDA Toolkit installed successfully.")
+                return True
+
+            elif "windows" in os_name:
+                print("Please download the CUDA Toolkit for Windows manually from the NVIDIA website.")
+                return False
+
+            else:
+                print("Unrecognized OS. Please install the CUDA Toolkit manually.")
+                return False
+
+    except subprocess.CalledProcessError as e:
+        print(f"Error while downloading or installing the CUDA Toolkit: {e}")
+        return False
+    except Exception as e:
+        print(f"General error: {e}")
+        return False
+
+def install_llama_with_cuda():
+    try:
+        # Check for (and if necessary install) the CUDA Toolkit
+        if not check_and_install_cuda():
+            print("Installation stopped because the CUDA Toolkit is unavailable.")
             return
 
-        print("[…]
-        # […]
-        [… several truncated lines that built the docker_run_command …]
-        # Run the Docker command
-        subprocess.run(docker_run_command, check=True)
-        print("llama-cpp-python installed successfully inside the Docker container.")
+        print("Reinstalling llama-cpp-python with CUDA support...")
+        # Reinstall command for llama-cpp-python
+        subprocess.run(
+            [
+                "pip", "install", "llama-cpp-python",
+                "--force-reinstall", "--no-cache-dir"
+            ],
+            env={**os.environ, "CMAKE_ARGS": "-DGGML_CUDA=on"},  # enable CUDA; merge the parent env so pip stays on PATH
+            check=True
+        )
+        print("llama-cpp-python reinstalled successfully with CUDA support.")
     except subprocess.CalledProcessError as e:
-        print(f"Error while […]
+        print(f"Error while reinstalling llama-cpp-python: {e}")
     except Exception as e:
         print(f"General error: {e}")
 
@@ -123,7 +155,7 @@ def launch_gradio(chat_engine):
 
 # Main function to run the application
 def main():
-
+    install_llama_with_cuda()
     # Download the model and initialize settings
     model_path = initialize_llama_model()
     initialize_settings(model_path)  # pass model_path to the initialize_settings function
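A quick way to confirm the rebuild worked (not part of this commit; a minimal sketch under stated assumptions): load any small local GGUF model through llama-cpp-python with full offload requested and watch the verbose startup log. On a working CUDA build, llama.cpp reports the CUDA device and an "offloaded N/N layers to GPU" line; a CPU-only build reports neither. The model.gguf path below is a hypothetical placeholder.

from llama_cpp import Llama

# Post-install smoke test (hypothetical): verbose=True makes llama.cpp print
# which backend it initialized and how many layers it offloaded to the GPU.
llm = Llama(
    model_path="model.gguf",  # placeholder: any small local GGUF model
    n_gpu_layers=-1,          # request offloading all layers to the GPU
    verbose=True,
)
print(llm("Hello", max_tokens=8)["choices"][0]["text"])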