import pandas as pd

# Define the data.
# 'Model Size' is recorded in billions of parameters; '99999' is used as a
# placeholder for proprietary models whose size is undisclosed.
data = {
    "Original Name": [],
    "Proper Display Name": [],
    "Link": [],
    "Model Size": [],
}

# Add model information to the data
data['Original Name'].append('gemma-2-2b-it')
data['Proper Display Name'].append('Gemma-2-2B-IT')
data['Link'].append('https://huggingface.co/google/gemma-2-2b-it')
data['Model Size'].append('2')

data['Original Name'].append('gemma-2-9b-it')
data['Proper Display Name'].append('Gemma-2-9B-IT')
data['Link'].append('https://huggingface.co/google/gemma-2-9b-it')
data['Model Size'].append('9')

data['Original Name'].append('GPT4o_0513')
data['Proper Display Name'].append('GPT4o-0513')
data['Link'].append('https://openai.com/index/hello-gpt-4o/')
data['Model Size'].append('99999')

data['Original Name'].append('llama3-8b-cpt-sea-lionv2-base')
data['Proper Display Name'].append('Llama3-8B-CPT-SEA-Lion-v2-Base')
data['Link'].append('https://huggingface.co/aisingapore/llama3-8b-cpt-sea-lionv2-base')
data['Model Size'].append('8')

data['Original Name'].append('llama3-8b-cpt-sea-lionv2-instruct')
data['Proper Display Name'].append('Llama3-8B-CPT-SEA-Lion-v2-Instruct')
data['Link'].append('https://huggingface.co/aisingapore/llama3-8b-cpt-sea-lionv2.1-instruct')
data['Model Size'].append('8')

data['Original Name'].append('Meta-Llama-3-8B')
data['Proper Display Name'].append('Meta-Llama-3-8B')
data['Link'].append('https://ai.meta.com/blog/meta-llama-3/')
data['Model Size'].append('8')

data['Original Name'].append('Meta-Llama-3-8B-Instruct')
data['Proper Display Name'].append('Meta-Llama-3-8B-Instruct')
data['Link'].append('https://ai.meta.com/blog/meta-llama-3/')
data['Model Size'].append('8')

data['Original Name'].append('Meta-Llama-3-70B-Instruct')
data['Proper Display Name'].append('Meta-Llama-3-70B-Instruct')
data['Link'].append('https://ai.meta.com/blog/meta-llama-3/')
data['Model Size'].append('70')

data['Original Name'].append('Meta-Llama-3.1-8B')
data['Proper Display Name'].append('Meta-Llama-3.1-8B')
data['Link'].append('https://ai.meta.com/blog/meta-llama-3-1/')
data['Model Size'].append('8')

data['Original Name'].append('Meta-Llama-3.1-8B-Instruct')
data['Proper Display Name'].append('Meta-Llama-3.1-8B-Instruct')
data['Link'].append('https://ai.meta.com/blog/meta-llama-3-1/')
data['Model Size'].append('8')

data['Original Name'].append('Meta-Llama-3.1-70B')
data['Proper Display Name'].append('Meta-Llama-3.1-70B')
data['Link'].append('https://ai.meta.com/blog/meta-llama-3-1/')
data['Model Size'].append('70')

data['Original Name'].append('Meta-Llama-3.1-70B-Instruct')
data['Proper Display Name'].append('Meta-Llama-3.1-70B-Instruct')
data['Link'].append('https://ai.meta.com/blog/meta-llama-3-1/')
data['Model Size'].append('70')

data['Original Name'].append('Qwen2_5_0_5B_Instruct')
data['Proper Display Name'].append('Qwen2.5-0.5B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2.5-0.5B-Instruct')
data['Model Size'].append('0.5')

data['Original Name'].append('Qwen2_5_1_5B_Instruct')
data['Proper Display Name'].append('Qwen2.5-1.5B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2.5-1.5B-Instruct')
data['Model Size'].append('1.5')

data['Original Name'].append('Qwen2_5_3B_Instruct')
data['Proper Display Name'].append('Qwen2.5-3B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2.5-3B-Instruct')
data['Model Size'].append('3')

data['Original Name'].append('Qwen2_5_7B_Instruct')
data['Proper Display Name'].append('Qwen2.5-7B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2.5-7B-Instruct')
data['Model Size'].append('7')

data['Original Name'].append('Qwen2_5_14B_Instruct')
data['Proper Display Name'].append('Qwen2.5-14B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2.5-14B-Instruct')
data['Model Size'].append('14')

data['Original Name'].append('Qwen2_5_32B_Instruct')
data['Proper Display Name'].append('Qwen2.5-32B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2.5-32B-Instruct')
data['Model Size'].append('32')

data['Original Name'].append('Qwen2_5_72B_Instruct')
data['Proper Display Name'].append('Qwen2.5-72B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2.5-72B-Instruct')
data['Model Size'].append('72')

data['Original Name'].append('Qwen2-7B-Instruct')
data['Proper Display Name'].append('Qwen2-7B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2-7B-Instruct')
data['Model Size'].append('7')

data['Original Name'].append('Qwen2-72B-Instruct')
data['Proper Display Name'].append('Qwen2-72B-Instruct')
data['Link'].append('https://huggingface.co/Qwen/Qwen2-72B-Instruct')
data['Model Size'].append('72')

data['Original Name'].append('SeaLLMs-v3-7B-Chat')
data['Proper Display Name'].append('SeaLLMs-v3-7B-Chat')
data['Link'].append('https://arxiv.org/abs/2407.19672')
data['Model Size'].append('7')

data['Original Name'].append('MERALiON-LLaMA-3-8B-Chat')
data['Proper Display Name'].append('MERALiON-LLaMA-3-8B-Chat')
data['Link'].append('https://huggingface.co/spaces/SeaEval/SeaEval_Leaderboard')
data['Model Size'].append('8')


def get_dataframe():
    """
    Returns a DataFrame with the model data and drops rows with missing values.
    """
    df = pd.DataFrame(data)
    return df.dropna(axis=0)


# Commented-out entry, not yet added to the table:
# cross_openhermes_llama3_8b_4096_inst
# Cross-OpenHermes-Llama-3-8B-4096-Inst
# https://huggingface.co/spaces/SeaEval/SeaEval_Leaderboard
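

# Usage sketch: a minimal sanity check when this module is run directly.
# (Assumption: the module is normally imported by a leaderboard app, so this
# __main__ guard has no effect on that path.)
if __name__ == "__main__":
    df = get_dataframe()
    # One row per model; columns: Original Name, Proper Display Name, Link, Model Size.
    print(df[['Proper Display Name', 'Model Size']].to_string(index=False))
    print(f'Total models: {len(df)}')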