# cognitivess/cognitivess_model/modeling_cognitivess.py
# Source: Hugging Face Hub — "Update cognitivess_model/modeling_cognitivess.py"
# Commit: b4e2fae (verified), 343 bytes
from transformers import LlamaForCausalLM
from .configuration_cognitivess import CognitivessConfig
class CognitivessForCausalLM(LlamaForCausalLM):
    """Causal language model for Cognitivess, built on the Llama architecture.

    Thin subclass of ``LlamaForCausalLM`` that binds ``CognitivessConfig``
    as the config class and optionally applies quantization at init time.
    """

    # Tells transformers' auto-loading machinery which config type this model uses.
    config_class = CognitivessConfig

    def __init__(self, config):
        """Initialize the model from *config*.

        Args:
            config: A ``CognitivessConfig`` instance. If it carries a truthy
                (non-None) ``quantization_config``, quantization is applied
                immediately after the base model is constructed.
        """
        super().__init__(config)
        # getattr with a default: configs built outside this package may not
        # define the attribute at all, which would raise AttributeError here.
        quantization_config = getattr(config, "quantization_config", None)
        if quantization_config is not None:
            # NOTE(review): `quantize` is not a standard PreTrainedModel /
            # LlamaForCausalLM method — confirm it is provided by a mixin or
            # elsewhere in this package before relying on this path.
            self.quantize(quantization_config)