from transformers import LlamaConfig


class CognitivessConfig(LlamaConfig):
    """Llama-based configuration that additionally carries quantization settings."""

    model_type = "cognitivess"

    def __init__(self, quantization_config=None, **kwargs):
        super().__init__(**kwargs)
        # Optional quantization settings (e.g. a BitsAndBytesConfig instance).
        self.quantization_config = quantization_config

    def to_dict(self):
        # Serialize the base Llama config, then append the quantization settings
        # so they are written out to config.json as well.
        output = super().to_dict()
        if self.quantization_config is not None:
            output["quantization_config"] = self.quantization_config.to_dict()
        return output
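

# Minimal usage sketch (assumed values, not part of the original snippet):
# any object exposing `to_dict()` works as `quantization_config`; here we use
# transformers.BitsAndBytesConfig. The architecture sizes are placeholders.
from transformers import AutoConfig, BitsAndBytesConfig

# Registering the class lets AutoConfig resolve model_type "cognitivess".
AutoConfig.register("cognitivess", CognitivessConfig)

config = CognitivessConfig(
    hidden_size=4096,       # placeholder value
    num_hidden_layers=32,   # placeholder value
    quantization_config=BitsAndBytesConfig(load_in_8bit=True),
)
print(config.to_dict()["quantization_config"])  # quantization settings survive serialization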