bstraehle committed on
Commit 9102fcd
Parent: 7ac1647

Update app.py

Files changed (1): app.py (+5 -4)
app.py CHANGED
@@ -123,9 +123,9 @@ def wandb_trace(rag_option, prompt, prompt_template, result, completion, chain_n
          status_code = "SUCCESS" if (str(status_msg) == "") else "ERROR",
          status_message = str(status_msg),
          metadata={
-             "chunk_overlap": config["chunk_overlap"] if (rag_option == "Off") else "N/A",
-             "chunk_size": config["chunk_size"] if (rag_option == "Off") else "N/A",
-             "k": config["k"] if (rag_option == "Off") else "N/A",
+             "chunk_overlap": config["chunk_overlap"] if (rag_option != "Off") else "",
+             "chunk_size": config["chunk_size"] if (rag_option != "Off") else "",
+             "k": config["k"] if (rag_option != "Off") else "N/A",
              "model": config["model"],
              "temperature": config["temperature"],
          },
@@ -133,6 +133,7 @@ def wandb_trace(rag_option, prompt, prompt_template, result, completion, chain_n
          end_time_ms = end_time_ms,
          inputs = {"rag_option": rag_option, "prompt": prompt, "prompt_template": prompt_template},
          outputs = {"result": str(result), "completion": str(completion)},
+         model_dict = {"_kind": "openai"}
      )
      trace.log("test")
      wandb.finish()
@@ -149,8 +150,8 @@ def invoke(openai_api_key, rag_option, prompt):
      prompt_template = ""
      chain_name = ""
      status_msg = ""
-     start_time_ms = round(time.time() * 1000)
      try:
+         start_time_ms = round(time.time() * 1000)
          llm = ChatOpenAI(model_name = config["model"],
                           openai_api_key = openai_api_key,
                           temperature = config["temperature"])
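
For reference, below is a minimal sketch of the trace-logging pattern this commit touches. It assumes the app uses W&B's trace-tree API (wandb.sdk.data_types.trace_tree.Trace), which is consistent with the model_dict, trace.log, and wandb.finish calls in the diff; the project name and all literal config values are placeholders, not taken from the repository.

# Sketch only: assumes the W&B trace-tree API; project name and literal
# values are placeholders, not taken from app.py.
import time
import wandb
from wandb.sdk.data_types.trace_tree import Trace

wandb.init(project="llm-rag-demo")        # hypothetical project name

start_time_ms = round(time.time() * 1000) # millisecond timestamps bracket the model call
completion = "..."                        # stand-in for the LLM / chain output
end_time_ms = round(time.time() * 1000)

trace = Trace(
    name="test",
    kind="chain",
    status_code="SUCCESS",
    status_message="",
    metadata={
        # With RAG off, the updated code logs "" / "N/A" for the RAG settings
        "chunk_overlap": "",
        "chunk_size": "",
        "k": "N/A",
        "model": "gpt-4",                 # placeholder model name
        "temperature": 0,
    },
    start_time_ms=start_time_ms,
    end_time_ms=end_time_ms,
    inputs={"rag_option": "Off", "prompt": "What is RAG?"},
    outputs={"completion": completion},
    model_dict={"_kind": "openai"},
)
trace.log("test")                         # log the span to the active run
wandb.finish()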