# DevToolKit / tokenizer.py
import os

from transformers import pipeline

HF_TOKEN = os.environ.get("HF_TOKEN")  # Hugging Face token for gated or private models


class CodeGenerator:
    def __init__(self, model_name='bigscience/T0_3B'):
        # T0_3B is a T5-style encoder-decoder, so it needs the 'text2text-generation'
        # task; the token is passed through so gated models can be loaded.
        self.generator = pipeline('text2text-generation', model=model_name, token=HF_TOKEN)
    def generate_code(self, task_description):
        """
        Generate code for the given task description using the configured language model.

        Parameters:
            task_description (str): The task description or prompt for generating the code.

        Returns:
            str: The generated code.
        """
        return self._generate_code_from_model(task_description)
    def _generate_code_from_model(self, input_text):
        """
        Internal method that runs the underlying pipeline.

        Parameters:
            input_text (str): The input text for code generation.

        Returns:
            str: The code generated by the language model.
        """
        # Sample a single sequence, capped at 50 tokens of output.
        outputs = self.generator(input_text, max_length=50, num_return_sequences=1, do_sample=True)
        return outputs[0]['generated_text']
def main():
    task_description = (
        "Develop an app that allows users to search for and modify files "
        "on a remote server using the SSH protocol"
    )
    code_generator = CodeGenerator()
    generated_code = code_generator.generate_code(task_description)
    print(generated_code)


if __name__ == "__main__":
    main()
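

# Usage sketch (illustrative, not part of the original script): the class can be
# imported and reused from elsewhere in DevToolKit. The module name 'tokenizer'
# and the prompt below are assumptions based only on this file's name.
#
#   >>> from tokenizer import CodeGenerator
#   >>> gen = CodeGenerator()
#   >>> print(gen.generate_code("Write a Python function that reverses a string"))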