# metadata_transformer.py
import os

from huggingface_hub import login
from easyllm.clients import huggingface
from easyllm.prompt_utils import build_llama2_prompt

# Authenticate with the Hugging Face Hub using a token from the environment.
TOKEN = os.environ.get("TOKEN")
login(token=TOKEN)

# Format all completion requests with the Llama 2 chat prompt template.
huggingface.prompt_builder = build_llama2_prompt
# system_message = """
# You are a metadata schema translator. You translate metadata from one schema to another.
# """
def translate(schema_input, schema_target):
    """Translate a metadata record from its current schema into `schema_target`."""
    prompt = '"""{} \n Translate the schema metadata file above to the schema: {}"""'.format(
        schema_input, schema_target
    )
    response = huggingface.Completion.create(
        model="princeton-nlp/Sheared-LLaMA-1.3B",
        prompt=prompt,
        temperature=0.9,
        top_p=0.6,
        max_tokens=250,
    )
    print(response)
    return response["choices"][0]["text"]
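

# Example usage: a minimal sketch of how translate() might be called. The sample
# metadata snippet and the target schema name below are hypothetical placeholders,
# not part of the original file.
if __name__ == "__main__":
    sample_metadata = '{"title": "Example Dataset", "creator": "Jane Doe", "date": "2024-01-01"}'
    translated = translate(sample_metadata, "schema.org Dataset")
    print(translated)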