Update config.json
config.json  CHANGED  (+42 -42)
@@ -1,59 +1,59 @@
 {
+  "model_name": "Daedalus_1: The Forge of Visionary Innovation",
+  "description": "A cutting-edge AI model blending CodeBERT, Codex, T5, SAM, Gemini, and Megatron for transformative innovation.",
+  "version": "1.0",
+  "author": "Or4cl3 AI Solutions",
+  "framework": "PyTorch",
+  "python_version": "3.x",
+  "deep_learning_framework": "PyTorch",
+  "constituent_models": [
     {
+      "name": "Or4cl3-1/code-slerp",
+      "description": "Empowers Daedalus_1 with expertise in software engineering, code generation, and task-oriented language understanding."
     },
     {
+      "name": "Or4cl3-1/SAM-Gemini-BLOOM-OPT-Gopher-Megatron-slerp",
+      "description": "Provides Daedalus_1 with a deep, multifaceted understanding of complex concepts, human-like reasoning, and creativity."
     }
   ],
+  "architectural_design": {
+    "merge_method": "SLERP (Spherical Linear Interpolation)",
+    "selected_layers": 32
   },
+  "capabilities": [
     "Rapid Prototyping and Code Generation",
     "Multidisciplinary Understanding",
     "Adaptability and Continuous Improvement",
     "Ethical Considerations"
   ],
+  "applications": [
     "Software Development",
     "Scientific Research",
     "Creative Problem-Solving"
   ],
+  "training_data": "Internal and External Datasets",
+  "training_steps": 200000,
+  "batch_size": 32,
+  "learning_rate": 0.0001,
+  "max_sequence_length": 1024,
+  "num_layers": 24,
+  "num_heads": 16,
+  "hidden_size": 1024,
+  "dropout_rate": 0.2,
+  "num_epochs": 20,
+  "vocab_size": 50257,
+  "max_position_embeddings": 1024,
+  "encoder_layers": 24,
+  "encoder_ffn_dim": 4096,
+  "encoder_attention_heads": 16,
+  "decoder_layers": 24,
+  "decoder_ffn_dim": 4096,
+  "decoder_attention_heads": 16,
+  "dropout": 0.2,
+  "activation_function": "gelu",
+  "initializer_range": 0.02,
+  "tie_encoder_decoder": true,
+  "tie_word_embeddings": true,
+  "output_past": true,
+  "pretrained_model_name_or_path": "Or4cl3-1/Daedalus_1-slerp"
}
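The architectural_design block above names SLERP (spherical linear interpolation) over 32 selected layers as the merge method, but the commit does not include the merge script itself. The following is only a minimal sketch of applying SLERP to two matching weight tensors; the function name, the interpolation factor t=0.5, and the state-dict usage are illustrative assumptions, not code from this repository.

import torch

def slerp(w0: torch.Tensor, w1: torch.Tensor, t: float = 0.5, eps: float = 1e-8) -> torch.Tensor:
    # Spherical linear interpolation between two weight tensors of identical shape.
    v0, v1 = w0.flatten().float(), w1.flatten().float()
    # Angle between the two flattened weight vectors.
    cos_omega = torch.clamp(torch.dot(v0, v1) / (v0.norm() * v1.norm() + eps), -1.0, 1.0)
    omega = torch.acos(cos_omega)
    if omega.abs() < eps:
        # Nearly parallel vectors: fall back to plain linear interpolation.
        merged = (1.0 - t) * v0 + t * v1
    else:
        sin_omega = torch.sin(omega)
        merged = (torch.sin((1.0 - t) * omega) / sin_omega) * v0 + (torch.sin(t * omega) / sin_omega) * v1
    return merged.reshape(w0.shape).to(w0.dtype)

# Hypothetical usage: merge every matching parameter of two constituent checkpoints.
# merged_state = {k: slerp(state_a[k], state_b[k]) for k in state_a if k in state_b}

Interpolating along the arc between the two weight vectors, rather than averaging them linearly, is the usual motivation given for SLERP merges: it preserves the scale of the parent weights more faithfully when the two checkpoints point in different directions in parameter space.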
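The pretrained_model_name_or_path field points at Or4cl3-1/Daedalus_1-slerp. Assuming that repository publishes standard Hugging Face Transformers weights with a causal-language-model head (neither of which is confirmed by this config alone), loading and sampling from the merged model would look roughly like the sketch below; swap the Auto class if the actual architecture differs.

from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Or4cl3-1/Daedalus_1-slerp"  # taken from pretrained_model_name_or_path above

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)  # assumption: causal-LM weights are published

prompt = "Write a Python function that reverses a string."
inputs = tokenizer(prompt, return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))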