---
base_model:
  - bamec66557/mergekit-slerp-uhnpbqg
  - bamec66557/mergekit-slerp-tsgkafq
library_name: transformers
tags:
  - mergekit
  - merge
  - not-for-all-audiences
license: apache-2.0
language:
  - en
---

# merge

This is a merge of pre-trained language models created using [mergekit](https://github.com/arcee-ai/mergekit).

## Merge Details

### Merge Method

This model was merged using the [SLERP](https://en.wikipedia.org/wiki/Slerp) merge method.
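
For intuition, SLERP interpolates between two models' weights along the arc of a hypersphere rather than along a straight line, which tends to preserve parameter scale better than plain averaging. Below is a minimal sketch of the idea applied to a single pair of weight tensors; it is an illustration under simplifying assumptions (flattened, whole-tensor interpolation), not mergekit's internal implementation. The factor `t` plays the same role as the per-layer `t` values in the configuration further down.

```python
import torch

def slerp(t: float, a: torch.Tensor, b: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    """Spherical linear interpolation: t=0 returns a, t=1 returns b."""
    a_flat = a.flatten().float()
    b_flat = b.flatten().float()
    # Angle between the two weight vectors (via the normalized dot product).
    dot = torch.clamp(
        (a_flat / (a_flat.norm() + eps)) @ (b_flat / (b_flat.norm() + eps)),
        -1.0, 1.0,
    )
    omega = torch.acos(dot)
    if omega.abs() < eps:
        # Nearly parallel vectors: fall back to linear interpolation.
        merged = (1.0 - t) * a_flat + t * b_flat
    else:
        so = torch.sin(omega)
        merged = (torch.sin((1.0 - t) * omega) / so) * a_flat \
               + (torch.sin(t * omega) / so) * b_flat
    return merged.reshape(a.shape).to(a.dtype)
```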

### Models Merged

The following models were included in the merge:

* [bamec66557/mergekit-slerp-uhnpbqg](https://huggingface.co/bamec66557/mergekit-slerp-uhnpbqg)
* [bamec66557/mergekit-slerp-tsgkafq](https://huggingface.co/bamec66557/mergekit-slerp-tsgkafq)

### Configuration

The following YAML configuration was used to produce this model:

```yaml
# Merging MISCHIEVOUS-12B-Mix Models with Sliced SLERP
base_model: bamec66557/mergekit-slerp-tsgkafq
dtype: bfloat16
merge_method: slerp
tokenizer_source: union

# Slices Configuration (Layer-Specific Merging)
slices:
  - name: initial_layers
    sources:
      - model: bamec66557/mergekit-slerp-uhnpbqg
        layer_range: [0, 10]
      - model: bamec66557/mergekit-slerp-tsgkafq
        layer_range: [0, 10]
    parameters:
      t:
        - name: self_attn
          value: [0.8, 0.85, 0.9, 0.95, 1.0]
        - name: mlp
          value: [0.9, 0.95, 1.0, 1.05, 1.1]
        - name: layer_norm
          value: [0.6, 0.65, 0.7, 0.75, 0.8]
        - name: embed_tokens
          value: [1.0]

  - name: middle_layers
    sources:
      - model: bamec66557/mergekit-slerp-uhnpbqg
        layer_range: [10, 20]
      - model: bamec66557/mergekit-slerp-tsgkafq
        layer_range: [10, 20]
    parameters:
      t:
        - name: self_attn
          value: [0.7, 0.75, 0.8, 0.85, 0.9]
        - name: mlp
          value: [1.0, 0.95, 0.9, 0.85, 0.8]
        - name: layer_norm
          value: [0.5, 0.55, 0.6, 0.65, 0.7]
        - name: embed_tokens
          value: [1.0]

  - name: upper_middle_layers
    sources:
      - model: bamec66557/mergekit-slerp-uhnpbqg
        layer_range: [20, 30]
      - model: bamec66557/mergekit-slerp-tsgkafq
        layer_range: [20, 30]
    parameters:
      t:
        - name: self_attn
          value: [0.6, 0.65, 0.7, 0.75, 0.8]
        - name: mlp
          value: [0.8, 0.75, 0.7, 0.65, 0.6]
        - name: layer_norm
          value: [0.4, 0.45, 0.5, 0.55, 0.6]
        - name: embed_tokens
          value: [1.0]

  - name: final_layers
    sources:
      - model: bamec66557/mergekit-slerp-uhnpbqg
        layer_range: [30, 40]
      - model: bamec66557/mergekit-slerp-tsgkafq
        layer_range: [30, 40]
    parameters:
      t:
        - name: self_attn
          value: [0.9, 1.0, 1.1, 1.2, 1.3]
        - name: mlp
          value: [0.7, 0.65, 0.6, 0.55, 0.5]
        - name: layer_norm
          value: [0.7, 0.75, 0.8, 0.85, 0.9]
        - name: embed_tokens
          value: [1.0]

# Regularization (Prevent Overfitting During Merging)
regularization:
  methods:
    - name: weight_clipping
      clip_range: [-0.2, 0.2]
    - name: random_noise
      scale: 0.015
    - name: l2_norm
      scale: 0.01

# Postprocessing (Enhance Merged Model Quality)
postprocessing:
  operations:
    - name: random_noise
      scale: 0.0025
    - name: non_linear_scaling
      parameters:
        function: tanh
    - name: sharpening
      intensity: 0.3
    - name: gaussian_smoothing
      sigma: 1.5
    - name: smoothing
      parameters:
        adaptive: true
        range: [0.8, 1.2]
        kernel_size: 5
    - name: normalize
    - name: dynamic_scaling
      scale_range: [0.75, 1.25]

# Optional: Ties Merging (Advanced Technique)
ties:
  enabled: false  # Set to true if ties merging is required
  method: greedy  # Options: greedy, optimal, random
  layers: [0, 10, 20, 30]  # Example layers for ties merging
```
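
A configuration like this is passed to mergekit's `mergekit-yaml` entry point (e.g. `mergekit-yaml config.yaml ./merged-model`). The merged weights then load like any other transformers checkpoint; below is a minimal sketch, with the repository id left as a placeholder since it depends on where the merged model is hosted.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder repo id -- substitute the actual repository this card belongs to.
model_id = "bamec66557/<this-repo>"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches the merge's dtype: bfloat16
    device_map="auto",
)

prompt = "Hello"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```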