# GenSim / cfg.yaml
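# Language Model Programs (LMPs): one completion config per prompt under prompts/.
# The key meanings below are assumed (this file appears to follow the
# Code-as-Policies-style LMP setup) and are not documented in the file itself:
#   prompt_path         - few-shot prompt file prepended to each query
#   engine              - OpenAI completion model
#   query_prefix/suffix - wrapped around the user query before it is appended to the prompt
#   stop                - stop sequences that terminate the completion
#   maintain_session    - if True, keep generated code/variables across calls
#   include_context     - if True, execute the provided scene context together with the generated code
#   has_return / return_val_name - whether the LMP returns a value and which variable holds it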
lmps:
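  # tabletop_ui: presumably the top-level LMP that turns user instructions into
  # executable robot code; it does not return a value (has_return: False).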
  tabletop_ui:
    prompt_path: prompts/tabletop_ui.py
    engine: text-davinci-003
    max_tokens: 256
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: True
    debug_mode: False
    include_context: True
    has_return: False
    return_val_name: ret_val
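  # parse_obj_name: presumably resolves an object description to matching object
  # names in the scene; the result is returned via ret_val.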
  parse_obj_name:
    prompt_path: prompts/parse_obj_name.py
    engine: text-davinci-003
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: ret_val
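  # parse_position: presumably maps a described location to workspace coordinates;
  # the result is returned via ret_val.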
  parse_position:
    prompt_path: prompts/parse_position.py
    engine: text-davinci-003
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: ret_val
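  # parse_question: presumably answers a question about the scene; the result is
  # returned via ret_val.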
  parse_question:
    prompt_path: prompts/parse_question.py
    engine: text-davinci-003
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: ret_val
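  # transform_shape_pts: presumably applies a described transformation to a set of
  # 2D points; the transformed points are returned via new_shape_pts.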
  transform_shape_pts:
    prompt_path: prompts/transform_shape_pts.py
    engine: text-davinci-003
    max_tokens: 512
    temperature: 0
    query_prefix: '# '
    query_suffix: '.'
    stop: ['#']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: True
    return_val_name: new_shape_pts
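  # fgen: presumably the function-generating LMP that defines helper functions on
  # demand, hence the '# define function: ' query prefix and the '# define' /
  # '# example' stop sequences.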
  fgen:
    prompt_path: prompts/fgen.py
    engine: text-davinci-003
    max_tokens: 512
    temperature: 0
    query_prefix: '# define function: '
    query_suffix: '.'
    stop: ['# define', '# example']
    maintain_session: False
    debug_mode: False
    include_context: True
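# Named tabletop regions as [x, y] positions (presumably meters in the workspace
# frame), plus the table surface height table_z.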
tabletop_coords:
  top_left: [-0.25, -0.25]
  top_side: [0, -0.25]
  top_right: [0.25, -0.25]
  left_side: [-0.25, -0.5]
  middle: [0, -0.5]
  right_side: [0.25, -0.5]
  bottom_left: [-0.25, -0.75]
  bottom_side: [0, -0.75]
  bottom_right: [0.25, -0.75]
  table_z: 0