{ "base_model_name_or_path": "bigscience/bloomz-560m", "inference_mode": true, "num_attention_heads": 16, "num_layers": 24, "num_transformer_submodules": 1, "num_virtual_tokens": 42, "peft_type": "PROMPT_TUNING", "prompt_tuning_init": "TEXT", "prompt_tuning_init_text": "\nFor the given input text below, provide the following YAML format, and nothing more: \ntopic: topic of the input text\ntype: type of the input text\"\n\nHere is an example:\nDo you like apples?\n\ntopic: apples\ntype: question\n\nHere is the input text:\n", "task_type": "CAUSAL_LM", "token_dim": 1024, "tokenizer_name_or_path": "bigscience/bloomz-560m" }