hf-legisqa / usage_mod.py
import streamlit as st


def get_openai_token_usage(response_metadata: dict, model_info: dict):
    # model_info["cost"]["pmi"] / ["pmo"]: price in USD per million input /
    # output tokens, hence the 1e-6 scaling below.
    input_tokens = response_metadata["token_usage"]["prompt_tokens"]
    output_tokens = response_metadata["token_usage"]["completion_tokens"]
    cost = (
        input_tokens * 1e-6 * model_info["cost"]["pmi"]
        + output_tokens * 1e-6 * model_info["cost"]["pmo"]
    )
    return {
        "input_tokens": input_tokens,
        "output_tokens": output_tokens,
        "cost": cost,
    }


def get_anthropic_token_usage(response_metadata: dict, model_info: dict):
    input_tokens = response_metadata["usage"]["input_tokens"]
    output_tokens = response_metadata["usage"]["output_tokens"]
    cost = (
        input_tokens * 1e-6 * model_info["cost"]["pmi"]
        + output_tokens * 1e-6 * model_info["cost"]["pmo"]
    )
    return {
        "input_tokens": input_tokens,
        "output_tokens": output_tokens,
        "cost": cost,
    }


def get_together_token_usage(response_metadata: dict, model_info: dict):
    input_tokens = response_metadata["token_usage"]["prompt_tokens"]
    output_tokens = response_metadata["token_usage"]["completion_tokens"]
    cost = (
        input_tokens * 1e-6 * model_info["cost"]["pmi"]
        + output_tokens * 1e-6 * model_info["cost"]["pmo"]
    )
    return {
        "input_tokens": input_tokens,
        "output_tokens": output_tokens,
        "cost": cost,
    }


def get_google_token_usage(response_metadata: dict, model_info: dict):
    # Token counts are not extracted from the Google response metadata here,
    # so zeros are reported (and the computed cost is also 0).
    input_tokens = 0
    output_tokens = 0
    cost = (
        input_tokens * 1e-6 * model_info["cost"]["pmi"]
        + output_tokens * 1e-6 * model_info["cost"]["pmo"]
    )
    return {
        "input_tokens": input_tokens,
        "output_tokens": output_tokens,
        "cost": cost,
    }


def get_token_usage(response_metadata: dict, model_info: dict, provider: str):
    match provider:
        case "OpenAI":
            return get_openai_token_usage(response_metadata, model_info)
        case "Anthropic":
            return get_anthropic_token_usage(response_metadata, model_info)
        case "Together":
            return get_together_token_usage(response_metadata, model_info)
        case "Google":
            return get_google_token_usage(response_metadata, model_info)
        case _:
            raise ValueError(f"Unrecognized provider: {provider}")
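
# Hypothetical example of the model_info shape the functions above expect:
# prices are quoted in USD per million tokens, e.g.
# model_info = {"cost": {"pmi": 3.0, "pmo": 15.0}} means $3 per million input
# tokens and $15 per million output tokens, so 2,000 input + 1,000 output
# tokens cost 2000 * 1e-6 * 3.0 + 1000 * 1e-6 * 15.0 = $0.021.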


def display_api_usage(
    response_metadata: dict, model_info: dict, provider: str, tag: str | None = None
):
    with st.container(border=True):
        if tag is None:
            st.write("API Usage")
        else:
            st.write(f"API Usage ({tag})")
        token_usage = get_token_usage(response_metadata, model_info, provider)
        col1, col2, col3 = st.columns(3)
        with col1:
            st.metric("Input Tokens", token_usage["input_tokens"])
        with col2:
            st.metric("Output Tokens", token_usage["output_tokens"])
        with col3:
            st.metric("Cost", f"${token_usage['cost']:.4f}")
        with st.expander("Response Metadata"):
            st.warning(response_metadata)
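

if __name__ == "__main__":
    # Minimal smoke-test sketch with hypothetical metadata (not part of the
    # Streamlit app): exercises the cost calculation without rendering the UI.
    example_metadata = {
        "token_usage": {"prompt_tokens": 1200, "completion_tokens": 400}
    }
    example_model_info = {"cost": {"pmi": 5.0, "pmo": 15.0}}
    usage = get_token_usage(example_metadata, example_model_info, "OpenAI")
    print(f"{usage['input_tokens']} in / {usage['output_tokens']} out / ${usage['cost']:.4f}")
    # -> 1200 in / 400 out / $0.0120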