# cognitivess_model/configuration_cognitivess.py
from transformers import LlamaConfig
class CognitivessConfig(LlamaConfig):
    """Configuration class for Cognitivess models.

    Thin wrapper around :class:`~transformers.LlamaConfig` that carries an
    optional ``quantization_config`` and makes sure it is included when the
    config is serialized with :meth:`to_dict`.
    """

    model_type = "cognitivess"

    def __init__(self, quantization_config=None, **kwargs):
        """
        Args:
            quantization_config: Optional quantization settings. May be a
                config object exposing ``to_dict()`` or a plain ``dict``
                (the latter is what ``from_dict``/``from_pretrained`` pass
                in after reading ``config.json``).
            **kwargs: Forwarded unchanged to ``LlamaConfig.__init__``.
        """
        super().__init__(**kwargs)
        self.quantization_config = quantization_config

    def to_dict(self):
        """Serialize this config, including ``quantization_config`` if set.

        Returns:
            dict: The base ``LlamaConfig`` serialization, augmented with a
            ``"quantization_config"`` entry when one is present.
        """
        output = super().to_dict()
        if self.quantization_config is not None:
            qc = self.quantization_config
            # Bug fix: after a JSON round-trip (from_dict/from_pretrained)
            # quantization_config is a plain dict with no .to_dict(); the
            # original code raised AttributeError in that case.
            output["quantization_config"] = (
                qc.to_dict() if hasattr(qc, "to_dict") else qc
            )
        return output