Update README.md
README.md CHANGED

@@ -6,7 +6,15 @@ library_name: transformers
 tags:
 - mergekit
 - peft
-
+- nvidia
+- chatqa-1.5
+- chatqa
+- llama-3
+- pytorch
+license: llama3
+language:
+- en
+pipeline_tag: text-generation
 ---
 # Untitled LoRA Model (1)

@@ -22,4 +30,4 @@ The following command was used to extract this LoRA adapter:

 ```sh
 mergekit-extract-lora meta-llama/Meta-Llama-3-8B nvidia/Llama3-ChatQA-1.5-8B OUTPUT_PATH --no-lazy-unpickle --rank=64
-```
+```