from transformers import LlamaConfig


class CognitivessConfig(LlamaConfig):
    """Configuration for Cognitivess models: LlamaConfig plus an optional quantization config."""

    model_type = "cognitivess"

    def __init__(self, quantization_config=None, **kwargs):
        super().__init__(**kwargs)
        # Keep the quantization settings (e.g. a dict or BitsAndBytesConfig) alongside the base Llama fields.
        self.quantization_config = quantization_config
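To make the custom config resolvable through the Auto classes, it can be registered with AutoConfig. The snippet below is a minimal sketch using the standard transformers registration API; the hidden_size/num_hidden_layers/num_attention_heads values and the quantization dict are illustrative, not taken from a released checkpoint.

from transformers import AutoConfig

# Register the custom config so AutoConfig can resolve model_type "cognitivess".
AutoConfig.register("cognitivess", CognitivessConfig)

# Instantiate with Llama-style fields plus the extra quantization_config attribute.
config = CognitivessConfig(
    hidden_size=4096,
    num_hidden_layers=32,
    num_attention_heads=32,
    quantization_config={"load_in_4bit": True},
)
print(config.model_type)           # "cognitivess"
print(config.quantization_config)  # {"load_in_4bit": True}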