improved eval_model()
- competition/00a_InternLM_2.5_and_Llama_3_Results.ipynb +0 -0
- competition/11_Llama-3_8b_analysis.ipynb +0 -0
- competition/11a_Llama-3_8b_p2_analysis.ipynb +0 -0
- competition/14_GLM-4_M3_eval.ipynb +1 -1
- llm_toolkit/llm_utils.py +31 -29
- results/mgtv-llama3_p1_full_metrics.csv +2 -2
- results/mgtv-llama3_p2_full_metrics.csv +2 -2
competition/00a_InternLM_2.5_and_Llama_3_Results.ipynb
CHANGED
The diff for this file is too large to render.
See raw diff
competition/11_Llama-3_8b_analysis.ipynb
CHANGED
The diff for this file is too large to render.
See raw diff
competition/11a_Llama-3_8b_p2_analysis.ipynb
CHANGED
The diff for this file is too large to render.
See raw diff
competition/14_GLM-4_M3_eval.ipynb
CHANGED
@@ -1 +1 @@
-
{"cells":[{"cell_type":"code","execution_count":1,"metadata":{"executionInfo":{"elapsed":476,"status":"ok","timestamp":1720679526275,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"uWKRSV6eZsCn"},"outputs":[],"source":["%load_ext autoreload\n","%autoreload 2"]},{"cell_type":"code","execution_count":2,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"eb33b19f-1206-41ee-84e2-e6258a12eef7","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":2534,"status":"ok","timestamp":1720679529344,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"xwFh14uiZBrI","outputId":"d767799c-34c2-46a5-f052-378146a55321"},"outputs":[],"source":["from pathlib import Path\n","\n","try:\n"," from google.colab import drive\n","\n"," drive.mount(\"/content/drive\")\n"," workding_dir = \"/content/drive/MyDrive/logical-reasoning/\"\n","except ModuleNotFoundError:\n"," workding_dir = str(Path.cwd().parent)"]},{"cell_type":"code","execution_count":3,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"6d394937-6c99-4a7c-9d32-7600a280032f","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":5,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"G5pNu3zgZBrL","outputId":"160a554f-fb08-4aa0-bc00-0422fb7c1fac"},"outputs":[{"name":"stdout","output_type":"stream","text":["workding dir: /Users/inflaton/code/engd/projects/logical-reasoning\n"]}],"source":["import os\n","import sys\n","from pathlib import Path\n","\n","os.chdir(workding_dir)\n","sys.path.append(workding_dir)\n","print(\"workding dir:\", workding_dir)"]},{"cell_type":"code","execution_count":4,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"ac667aba-076e-4de6-9984-8f6a67cb09cd","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":4,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"0dVRAabNZBrL","outputId":"b977e116-df16-47cd-9160-a24f611da687"},"outputs":[{"data":{"text/plain":["False"]},"execution_count":4,"metadata":{},"output_type":"execute_result"}],"source":["need_to_setup_env = False\n","need_to_setup_env"]},{"cell_type":"code","execution_count":5,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"72f9cf79-7b0d-4d9e-90a0-1fa5251b947f","showTitle":false,"title":""},"executionInfo":{"elapsed":4,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"hKUOfP2HZBrL"},"outputs":[],"source":["if need_to_setup_env:\n"," %pip install -r requirements.txt\n"," %cd /content/\n"," %rm -rf LLaMA-Factory\n"," !git clone https://github.com/hiyouga/LLaMA-Factory.git\n"," %cd LLaMA-Factory\n"," %ls\n"," %pip install -e .[torch,bitsandbytes]\n"," \n"," os.chdir(workding_dir)\n"," sys.path.append(workding_dir)\n"," print(\"workding dir:\", 
workding_dir)"]},{"cell_type":"code","execution_count":6,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"9f67ec60-2f24-411c-84eb-0dd664b44775","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":3,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"hPCC-6m7ZBrM","outputId":"c7aa2c96-5e99-440a-c148-201d79465ff9"},"outputs":[{"name":"stdout","output_type":"stream","text":["loading env vars from: /Users/inflaton/code/engd/projects/logical-reasoning/.env\n"]},{"data":{"text/plain":["True"]},"execution_count":6,"metadata":{},"output_type":"execute_result"}],"source":["from dotenv import find_dotenv, load_dotenv\n","\n","found_dotenv = find_dotenv(\".env\")\n","\n","if len(found_dotenv) == 0:\n"," found_dotenv = find_dotenv(\".env.example\")\n","print(f\"loading env vars from: {found_dotenv}\")\n","load_dotenv(found_dotenv, override=True)"]},{"cell_type":"code","execution_count":7,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"f1597656-8042-4878-9d3b-9ebfb8dd86dc","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":3,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"1M3IraVtZBrM","outputId":"29ab35f6-2970-4ade-d85d-3174acf8cda0"},"outputs":[{"name":"stdout","output_type":"stream","text":["THUDM/glm-4-9b-chat-1m None False datasets/mgtv results/mgtv-results_m3.csv\n"]}],"source":["import os\n","\n","model_name = os.getenv(\"MODEL_NAME\")\n","adapter_name_or_path = os.getenv(\"ADAPTER_NAME_OR_PATH\")\n","load_in_4bit = os.getenv(\"LOAD_IN_4BIT\") == \"true\"\n","data_path = os.getenv(\"LOGICAL_REASONING_DATA_PATH\")\n","results_path = os.getenv(\"LOGICAL_REASONING_RESULTS_PATH\")\n","use_english_datasets = os.getenv(\"USE_ENGLISH_DATASETS\") == \"true\"\n","\n","print(model_name, adapter_name_or_path, load_in_4bit, data_path, results_path)"]},{"cell_type":"code","execution_count":8,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"b2a43943-9324-4839-9a47-cfa72de2244b","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":564,"status":"ok","timestamp":1720679529907,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"UgMvt6dIZBrM","outputId":"ce37581c-fd26-46c2-ad87-d933d99f68f7"},"outputs":[{"name":"stdout","output_type":"stream","text":["Python 3.11.9\n","\u001b[33mWARNING: Package(s) not found: flash-attn\u001b[0m\u001b[33m\n","\u001b[0mCPU times: user 4.12 ms, sys: 9.4 ms, total: 13.5 ms\n","Wall time: 651 ms\n"]}],"source":["%%time\n","!python --version\n","!pip show flash-attn"]},{"cell_type":"code","execution_count":9,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":1685,"status":"ok","timestamp":1720679531591,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"ZuS_FsLyZBrN","outputId":"2cba0105-c505-4395-afbd-2f2fee6581d0"},"outputs":[{"name":"stdout","output_type":"stream","text":["loading /Users/inflaton/code/engd/projects/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\n","MPS is 
available\n"]}],"source":["from llm_toolkit.llm_utils import *\n","from llm_toolkit.logical_reasoning_utils import *\n","\n","device = check_gpu()"]},{"cell_type":"code","execution_count":14,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["loading model: THUDM/glm-4-9b-chat-1m with adapter: None\n"]},{"name":"stderr","output_type":"stream","text":["Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"19aebcc5f4934933878ce3bb1fdc4b32","version_major":2,"version_minor":0},"text/plain":["Downloading shards: 0%| | 0/10 [00:00<?, ?it/s]"]},"metadata":{},"output_type":"display_data"},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"c161cec46c72435a8b8d0beab1a329fa","version_major":2,"version_minor":0},"text/plain":["model-00006-of-00010.safetensors: 96%|#########6| 1.91G/1.99G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"13d593455bc94919846a6b5bd288740a","version_major":2,"version_minor":0},"text/plain":["model-00007-of-00010.safetensors: 0%| | 0.00/1.95G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"6154990ed2f047348720919a884f67d0","version_major":2,"version_minor":0},"text/plain":["model-00008-of-00010.safetensors: 0%| | 0.00/1.84G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stderr","output_type":"stream","text":["Error while downloading from https://cdn-lfs-us-1.huggingface.co/repos/8f/69/8f69006a64acf627b9dd5b8ed9962abf5ef416d57fc370004ac5f598cea0df41/ea76696ccd8b266eff2e755d9286a789a2b8d644a19b2cdccd365933e97cfa94?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27model-00008-of-00010.safetensors%3B+filename%3D%22model-00008-of-00010.safetensors%22%3B&Expires=1721695460&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcyMTY5NTQ2MH19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzhmLzY5LzhmNjkwMDZhNjRhY2Y2MjdiOWRkNWI4ZWQ5OTYyYWJmNWVmNDE2ZDU3ZmMzNzAwMDRhYzVmNTk4Y2VhMGRmNDEvZWE3NjY5NmNjZDhiMjY2ZWZmMmU3NTVkOTI4NmE3ODlhMmI4ZDY0NGExOWIyY2RjY2QzNjU5MzNlOTdjZmE5ND9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=BFlf2hKIdIMTSR%7E5dNEt5kHkOVcTfJOy3l8sseMydbGlzagX5bSyy18zLAc450pgxun6NKtv1ke8Db3nCDnl4DSSFOkzxH0zEzNzWN0Jt7P7axDubD%7EE7qeD1VMn1NB-r8OI0QDaF9Z%7EnAd9--fXyiXX7hacB3aCvmnsiwQbHkNy2DO89UKGcKIrf1yvXQMY-uO3RYtypVGqwjpgBBE9pf3n-Z1SEjAHwpxLeqowvj3Jc8yI5M-R60ymjlGe24Zcrdg25ScWJxnzKlqrAPB2p3P9clz7LdxLiI-7Ip0k8TsbkM-5BMe-yc99ED77-qzb6t9qQhlAFCOznu67KjFb-w__&Key-Pair-Id=K24J24Z295AEI9: HTTPSConnectionPool(host='cdn-lfs-us-1.huggingface.co', port=443): Read timed out.\n","Trying to resume download...\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"0c0b15e2f9e344b9ad4274fcc150d55d","version_major":2,"version_minor":0},"text/plain":["model-00008-of-00010.safetensors: 31%|### | 566M/1.84G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stderr","output_type":"stream","text":["Error while downloading from 
https://cdn-lfs-us-1.huggingface.co/repos/8f/69/8f69006a64acf627b9dd5b8ed9962abf5ef416d57fc370004ac5f598cea0df41/ea76696ccd8b266eff2e755d9286a789a2b8d644a19b2cdccd365933e97cfa94?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27model-00008-of-00010.safetensors%3B+filename%3D%22model-00008-of-00010.safetensors%22%3B&Expires=1721695460&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcyMTY5NTQ2MH19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzhmLzY5LzhmNjkwMDZhNjRhY2Y2MjdiOWRkNWI4ZWQ5OTYyYWJmNWVmNDE2ZDU3ZmMzNzAwMDRhYzVmNTk4Y2VhMGRmNDEvZWE3NjY5NmNjZDhiMjY2ZWZmMmU3NTVkOTI4NmE3ODlhMmI4ZDY0NGExOWIyY2RjY2QzNjU5MzNlOTdjZmE5ND9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=BFlf2hKIdIMTSR%7E5dNEt5kHkOVcTfJOy3l8sseMydbGlzagX5bSyy18zLAc450pgxun6NKtv1ke8Db3nCDnl4DSSFOkzxH0zEzNzWN0Jt7P7axDubD%7EE7qeD1VMn1NB-r8OI0QDaF9Z%7EnAd9--fXyiXX7hacB3aCvmnsiwQbHkNy2DO89UKGcKIrf1yvXQMY-uO3RYtypVGqwjpgBBE9pf3n-Z1SEjAHwpxLeqowvj3Jc8yI5M-R60ymjlGe24Zcrdg25ScWJxnzKlqrAPB2p3P9clz7LdxLiI-7Ip0k8TsbkM-5BMe-yc99ED77-qzb6t9qQhlAFCOznu67KjFb-w__&Key-Pair-Id=K24J24Z295AEI9: HTTPSConnectionPool(host='cdn-lfs-us-1.huggingface.co', port=443): Read timed out.\n","Trying to resume download...\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"82f48613f82047a7abc23a99939275ac","version_major":2,"version_minor":0},"text/plain":["model-00008-of-00010.safetensors: 34%|###3 | 619M/1.84G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stderr","output_type":"stream","text":["Error while downloading from https://cdn-lfs-us-1.huggingface.co/repos/8f/69/8f69006a64acf627b9dd5b8ed9962abf5ef416d57fc370004ac5f598cea0df41/ea76696ccd8b266eff2e755d9286a789a2b8d644a19b2cdccd365933e97cfa94?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27model-00008-of-00010.safetensors%3B+filename%3D%22model-00008-of-00010.safetensors%22%3B&Expires=1721695460&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcyMTY5NTQ2MH19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzhmLzY5LzhmNjkwMDZhNjRhY2Y2MjdiOWRkNWI4ZWQ5OTYyYWJmNWVmNDE2ZDU3ZmMzNzAwMDRhYzVmNTk4Y2VhMGRmNDEvZWE3NjY5NmNjZDhiMjY2ZWZmMmU3NTVkOTI4NmE3ODlhMmI4ZDY0NGExOWIyY2RjY2QzNjU5MzNlOTdjZmE5ND9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=BFlf2hKIdIMTSR%7E5dNEt5kHkOVcTfJOy3l8sseMydbGlzagX5bSyy18zLAc450pgxun6NKtv1ke8Db3nCDnl4DSSFOkzxH0zEzNzWN0Jt7P7axDubD%7EE7qeD1VMn1NB-r8OI0QDaF9Z%7EnAd9--fXyiXX7hacB3aCvmnsiwQbHkNy2DO89UKGcKIrf1yvXQMY-uO3RYtypVGqwjpgBBE9pf3n-Z1SEjAHwpxLeqowvj3Jc8yI5M-R60ymjlGe24Zcrdg25ScWJxnzKlqrAPB2p3P9clz7LdxLiI-7Ip0k8TsbkM-5BMe-yc99ED77-qzb6t9qQhlAFCOznu67KjFb-w__&Key-Pair-Id=K24J24Z295AEI9: HTTPSConnectionPool(host='cdn-lfs-us-1.huggingface.co', port=443): Read timed out.\n","Trying to resume download...\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"d028a93b6cc146c5abc72f0469ea7481","version_major":2,"version_minor":0},"text/plain":["model-00008-of-00010.safetensors: 37%|###6 | 671M/1.84G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"c838854922d141779d3b2e42067ae07c","version_major":2,"version_minor":0},"text/plain":["model-00009-of-00010.safetensors: 0%| | 0.00/1.99G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stderr","output_type":"stream","text":["Error while downloading from 
https://cdn-lfs-us-1.huggingface.co/repos/8f/69/8f69006a64acf627b9dd5b8ed9962abf5ef416d57fc370004ac5f598cea0df41/6100cba5825057f51778ab70e4c4fdd4485a401a8ef97893bf6b36a8efd48ce3?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27model-00009-of-00010.safetensors%3B+filename%3D%22model-00009-of-00010.safetensors%22%3B&Expires=1721697773&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcyMTY5Nzc3M319LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzhmLzY5LzhmNjkwMDZhNjRhY2Y2MjdiOWRkNWI4ZWQ5OTYyYWJmNWVmNDE2ZDU3ZmMzNzAwMDRhYzVmNTk4Y2VhMGRmNDEvNjEwMGNiYTU4MjUwNTdmNTE3NzhhYjcwZTRjNGZkZDQ0ODVhNDAxYThlZjk3ODkzYmY2YjM2YThlZmQ0OGNlMz9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=aGPzuWpi1BvTr2gbos2WuuTyyC-JMAAE1YWe3nZNSwTDzc3vlHUly2jUSCRCa8K2exeOZQsiXFCRSuHsk4Qa6gM1Qa--pmB0zsashQLoK2aazR8uUzzM8H4M7UQoOYTmGsEg9znoKZIMnmajJBWYDyp9MEXMkw%7ELDooaZO-G0DHjYi-bAbarua-D2tCMw%7EuyylMzUisJ2bywAnfDjpJ%7EU9GRVEGzcMYjI3178sOu09es4IYPG-D2beO217KOJsO6W4QtwUsbMmGBKzq2GRV2U6ZNn0wm%7EHogTyk0mIaVYm8a-kHi5JfypmzDGGwyJJCK-XvsTMSRB33rOT84muusGg__&Key-Pair-Id=K24J24Z295AEI9: HTTPSConnectionPool(host='cdn-lfs-us-1.huggingface.co', port=443): Read timed out.\n","Trying to resume download...\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"276e5b1f083e4c40a08a7a94f63a8394","version_major":2,"version_minor":0},"text/plain":["model-00009-of-00010.safetensors: 62%|######2 | 1.24G/1.99G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"7a92b91006b745aa9e3c471a8d33f32b","version_major":2,"version_minor":0},"text/plain":["model-00010-of-00010.safetensors: 0%| | 0.00/1.65G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stderr","output_type":"stream","text":["Error while downloading from https://cdn-lfs-us-1.huggingface.co/repos/8f/69/8f69006a64acf627b9dd5b8ed9962abf5ef416d57fc370004ac5f598cea0df41/e12b9195e943b6561c6541008881350dc7488520b9938427c1a7b97ddc147283?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27model-00010-of-00010.safetensors%3B+filename%3D%22model-00010-of-00010.safetensors%22%3B&Expires=1721699528&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcyMTY5OTUyOH19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzhmLzY5LzhmNjkwMDZhNjRhY2Y2MjdiOWRkNWI4ZWQ5OTYyYWJmNWVmNDE2ZDU3ZmMzNzAwMDRhYzVmNTk4Y2VhMGRmNDEvZTEyYjkxOTVlOTQzYjY1NjFjNjU0MTAwODg4MTM1MGRjNzQ4ODUyMGI5OTM4NDI3YzFhN2I5N2RkYzE0NzI4Mz9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=ug6MzTDVzWb5Z6HTBIeFu%7EJkbAaGC%7E%7ECPnM04d9ZtcPtPMBqF-D1i2mc37EggfJDQ7YGHoON-qgUP%7E1GOw6R9klaND3IXCIvugfHfu6IY2k5kuYpN5JIL9yWS0ocYCPdU28fJMXAasjwLLYdXiACgzOEQd8hVQQmCBzw6QPzzAdnkM5ARG5%7EQW%7EVD1nY0aySU1DeYT9gMj74HsJuoJGBbuQSTwZw9i3Wrn9pDbEBW6fJ5uEM-hJIziGhvAX28dW0UKcYVP-8bfvvbcMmax20pZDHUgksqXTbMgJ9f7BdN7GhsH-AzYu9d3FvNXgF2jtoiHrjXA6VcFZyyTOknLr9zw__&Key-Pair-Id=K24J24Z295AEI9: HTTPSConnectionPool(host='cdn-lfs-us-1.huggingface.co', port=443): Read timed out.\n","Trying to resume download...\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"86aa33621fd746e6ae3f0303aba3bb25","version_major":2,"version_minor":0},"text/plain":["model-00010-of-00010.safetensors: 35%|###4 | 577M/1.65G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stderr","output_type":"stream","text":["Error while downloading from 
https://cdn-lfs-us-1.huggingface.co/repos/8f/69/8f69006a64acf627b9dd5b8ed9962abf5ef416d57fc370004ac5f598cea0df41/e12b9195e943b6561c6541008881350dc7488520b9938427c1a7b97ddc147283?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27model-00010-of-00010.safetensors%3B+filename%3D%22model-00010-of-00010.safetensors%22%3B&Expires=1721699528&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcyMTY5OTUyOH19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzhmLzY5LzhmNjkwMDZhNjRhY2Y2MjdiOWRkNWI4ZWQ5OTYyYWJmNWVmNDE2ZDU3ZmMzNzAwMDRhYzVmNTk4Y2VhMGRmNDEvZTEyYjkxOTVlOTQzYjY1NjFjNjU0MTAwODg4MTM1MGRjNzQ4ODUyMGI5OTM4NDI3YzFhN2I5N2RkYzE0NzI4Mz9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=ug6MzTDVzWb5Z6HTBIeFu%7EJkbAaGC%7E%7ECPnM04d9ZtcPtPMBqF-D1i2mc37EggfJDQ7YGHoON-qgUP%7E1GOw6R9klaND3IXCIvugfHfu6IY2k5kuYpN5JIL9yWS0ocYCPdU28fJMXAasjwLLYdXiACgzOEQd8hVQQmCBzw6QPzzAdnkM5ARG5%7EQW%7EVD1nY0aySU1DeYT9gMj74HsJuoJGBbuQSTwZw9i3Wrn9pDbEBW6fJ5uEM-hJIziGhvAX28dW0UKcYVP-8bfvvbcMmax20pZDHUgksqXTbMgJ9f7BdN7GhsH-AzYu9d3FvNXgF2jtoiHrjXA6VcFZyyTOknLr9zw__&Key-Pair-Id=K24J24Z295AEI9: HTTPSConnectionPool(host='cdn-lfs-us-1.huggingface.co', port=443): Read timed out.\n","Trying to resume download...\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"f7b297755911496a9ee53e3560136e23","version_major":2,"version_minor":0},"text/plain":["model-00010-of-00010.safetensors: 64%|######4 | 1.06G/1.65G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stderr","output_type":"stream","text":["Error while downloading from https://cdn-lfs-us-1.huggingface.co/repos/8f/69/8f69006a64acf627b9dd5b8ed9962abf5ef416d57fc370004ac5f598cea0df41/e12b9195e943b6561c6541008881350dc7488520b9938427c1a7b97ddc147283?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27model-00010-of-00010.safetensors%3B+filename%3D%22model-00010-of-00010.safetensors%22%3B&Expires=1721699528&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcyMTY5OTUyOH19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzhmLzY5LzhmNjkwMDZhNjRhY2Y2MjdiOWRkNWI4ZWQ5OTYyYWJmNWVmNDE2ZDU3ZmMzNzAwMDRhYzVmNTk4Y2VhMGRmNDEvZTEyYjkxOTVlOTQzYjY1NjFjNjU0MTAwODg4MTM1MGRjNzQ4ODUyMGI5OTM4NDI3YzFhN2I5N2RkYzE0NzI4Mz9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=ug6MzTDVzWb5Z6HTBIeFu%7EJkbAaGC%7E%7ECPnM04d9ZtcPtPMBqF-D1i2mc37EggfJDQ7YGHoON-qgUP%7E1GOw6R9klaND3IXCIvugfHfu6IY2k5kuYpN5JIL9yWS0ocYCPdU28fJMXAasjwLLYdXiACgzOEQd8hVQQmCBzw6QPzzAdnkM5ARG5%7EQW%7EVD1nY0aySU1DeYT9gMj74HsJuoJGBbuQSTwZw9i3Wrn9pDbEBW6fJ5uEM-hJIziGhvAX28dW0UKcYVP-8bfvvbcMmax20pZDHUgksqXTbMgJ9f7BdN7GhsH-AzYu9d3FvNXgF2jtoiHrjXA6VcFZyyTOknLr9zw__&Key-Pair-Id=K24J24Z295AEI9: HTTPSConnectionPool(host='cdn-lfs-us-1.huggingface.co', port=443): Read timed out.\n","Trying to resume download...\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"16b66bf89ab24bad9bd8732e382e7262","version_major":2,"version_minor":0},"text/plain":["model-00010-of-00010.safetensors: 64%|######4 | 1.06G/1.65G [00:00<?, ?B/s]"]},"metadata":{},"output_type":"display_data"}],"source":["%%time\n","\n","model, tokenizer = load_model(model_name, adapter_name_or_path=adapter_name_or_path, using_llama_factory=False)"]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[{"ename":"NameError","evalue":"name 'tokenizer' is not 
defined","output_type":"error","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)","Cell \u001b[0;32mIn[13], line 3\u001b[0m\n\u001b[1;32m 1\u001b[0m datasets \u001b[38;5;241m=\u001b[39m load_logical_reasoning_dataset(\n\u001b[1;32m 2\u001b[0m data_path,\n\u001b[0;32m----> 3\u001b[0m tokenizer\u001b[38;5;241m=\u001b[39m\u001b[43mtokenizer\u001b[49m,\n\u001b[1;32m 4\u001b[0m chinese_prompt\u001b[38;5;241m=\u001b[39m\u001b[38;5;129;01mnot\u001b[39;00m use_english_datasets,\n\u001b[1;32m 5\u001b[0m using_p1\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[1;32m 6\u001b[0m )\n","\u001b[0;31mNameError\u001b[0m: name 'tokenizer' is not defined"]}],"source":["datasets = load_logical_reasoning_dataset(\n"," data_path,\n"," tokenizer=tokenizer,\n"," chinese_prompt=not use_english_datasets,\n"," using_p1=False,\n",")"]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["def evaluate_model(model, tokenizer, model_name, dataset, batch_size=1):\n"," print(f\"Evaluating model: {model_name} on {device}\")\n"," predictions = eval_model(\n"," model, tokenizer, dataset, device=device, batch_size=batch_size\n"," )\n","\n"," save_results(\n"," model_name,\n"," results_path,\n"," dataset,\n"," predictions,\n"," debug=False,\n"," )\n","\n"," metrics = calc_metrics(dataset[\"label\"], predictions, debug=False)\n"," print(metrics)"]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["Evaluating model: internlm/internlm2_5-7b-chat-1m_llama-factory/saves/internlm2_5_7b/lora/sft_bf16_p2_full/checkpoint-88 on mps\n"]},{"name":"stderr","output_type":"stream","text":[" 0%| | 1/3000 [00:06<5:23:35, 6.47s/it]"]},{"name":"stdout","output_type":"stream","text":["--------\n","step 1: 不是</s>\n","--------\n","step 2: 不是\n","--------\n","step 3: 不是\n","--------\n","step 4: 不是\n","--------\n","step 5: 不是\n"]},{"name":"stderr","output_type":"stream","text":[" 22%|██▏ | 657/3000 [40:38<2:18:00, 3.53s/it]"]}],"source":["%%time\n","\n","evaluate_model(model, tokenizer, f\"{model_name}{'_' + adapter_name_or_path if adapter_name_or_path else ''}\", datasets[\"test\"], batch_size=16)"]}],"metadata":{"accelerator":"GPU","application/vnd.databricks.v1+notebook":{"dashboards":[],"environmentMetadata":null,"language":"python","notebookMetadata":{"mostRecentlyExecutedCommandWithImplicitDF":{"commandId":-1,"dataframes":["_sqldf"]},"pythonIndentUnit":4},"notebookName":"10_eval-lf-medium-py3.11","widgets":{}},"colab":{"gpuType":"L4","provenance":[]},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.11.9"}},"nbformat":4,"nbformat_minor":0}
+
{"cells":[{"cell_type":"code","execution_count":1,"metadata":{"executionInfo":{"elapsed":476,"status":"ok","timestamp":1720679526275,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"uWKRSV6eZsCn"},"outputs":[],"source":["%load_ext autoreload\n","%autoreload 2"]},{"cell_type":"code","execution_count":2,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"eb33b19f-1206-41ee-84e2-e6258a12eef7","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":2534,"status":"ok","timestamp":1720679529344,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"xwFh14uiZBrI","outputId":"d767799c-34c2-46a5-f052-378146a55321"},"outputs":[],"source":["from pathlib import Path\n","\n","try:\n"," from google.colab import drive\n","\n"," drive.mount(\"/content/drive\")\n"," workding_dir = \"/content/drive/MyDrive/logical-reasoning/\"\n","except ModuleNotFoundError:\n"," workding_dir = str(Path.cwd().parent)"]},{"cell_type":"code","execution_count":3,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"6d394937-6c99-4a7c-9d32-7600a280032f","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":5,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"G5pNu3zgZBrL","outputId":"160a554f-fb08-4aa0-bc00-0422fb7c1fac"},"outputs":[{"name":"stdout","output_type":"stream","text":["workding dir: /Users/inflaton/code/engd/projects/logical-reasoning\n"]}],"source":["import os\n","import sys\n","from pathlib import Path\n","\n","os.chdir(workding_dir)\n","sys.path.append(workding_dir)\n","print(\"workding dir:\", workding_dir)"]},{"cell_type":"code","execution_count":4,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"ac667aba-076e-4de6-9984-8f6a67cb09cd","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":4,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"0dVRAabNZBrL","outputId":"b977e116-df16-47cd-9160-a24f611da687"},"outputs":[{"data":{"text/plain":["False"]},"execution_count":4,"metadata":{},"output_type":"execute_result"}],"source":["need_to_setup_env = False\n","need_to_setup_env"]},{"cell_type":"code","execution_count":5,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"72f9cf79-7b0d-4d9e-90a0-1fa5251b947f","showTitle":false,"title":""},"executionInfo":{"elapsed":4,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"hKUOfP2HZBrL"},"outputs":[],"source":["if need_to_setup_env:\n"," %pip install -r requirements.txt\n"," %cd /content/\n"," %rm -rf LLaMA-Factory\n"," !git clone https://github.com/hiyouga/LLaMA-Factory.git\n"," %cd LLaMA-Factory\n"," %ls\n"," %pip install -e .[torch,bitsandbytes]\n"," \n"," os.chdir(workding_dir)\n"," sys.path.append(workding_dir)\n"," print(\"workding dir:\", 
workding_dir)"]},{"cell_type":"code","execution_count":6,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"9f67ec60-2f24-411c-84eb-0dd664b44775","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":3,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"hPCC-6m7ZBrM","outputId":"c7aa2c96-5e99-440a-c148-201d79465ff9"},"outputs":[{"name":"stdout","output_type":"stream","text":["loading env vars from: /Users/inflaton/code/engd/projects/logical-reasoning/.env\n"]},{"data":{"text/plain":["True"]},"execution_count":6,"metadata":{},"output_type":"execute_result"}],"source":["from dotenv import find_dotenv, load_dotenv\n","\n","found_dotenv = find_dotenv(\".env\")\n","\n","if len(found_dotenv) == 0:\n"," found_dotenv = find_dotenv(\".env.example\")\n","print(f\"loading env vars from: {found_dotenv}\")\n","load_dotenv(found_dotenv, override=True)"]},{"cell_type":"code","execution_count":7,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"f1597656-8042-4878-9d3b-9ebfb8dd86dc","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":3,"status":"ok","timestamp":1720679529345,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"1M3IraVtZBrM","outputId":"29ab35f6-2970-4ade-d85d-3174acf8cda0"},"outputs":[{"name":"stdout","output_type":"stream","text":["THUDM/glm-4-9b-chat-1m None False datasets/mgtv results/mgtv-results_m3.csv\n"]}],"source":["import os\n","\n","model_name = os.getenv(\"MODEL_NAME\")\n","adapter_name_or_path = os.getenv(\"ADAPTER_NAME_OR_PATH\")\n","load_in_4bit = os.getenv(\"LOAD_IN_4BIT\") == \"true\"\n","data_path = os.getenv(\"LOGICAL_REASONING_DATA_PATH\")\n","results_path = os.getenv(\"LOGICAL_REASONING_RESULTS_PATH\")\n","use_english_datasets = os.getenv(\"USE_ENGLISH_DATASETS\") == \"true\"\n","\n","print(model_name, adapter_name_or_path, load_in_4bit, data_path, results_path)"]},{"cell_type":"code","execution_count":8,"metadata":{"application/vnd.databricks.v1+cell":{"cellMetadata":{"byteLimit":2048000,"rowLimit":10000},"inputWidgets":{},"nuid":"b2a43943-9324-4839-9a47-cfa72de2244b","showTitle":false,"title":""},"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":564,"status":"ok","timestamp":1720679529907,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"UgMvt6dIZBrM","outputId":"ce37581c-fd26-46c2-ad87-d933d99f68f7"},"outputs":[{"name":"stdout","output_type":"stream","text":["Python 3.11.9\n","\u001b[33mWARNING: Package(s) not found: flash-attn\u001b[0m\u001b[33m\n","\u001b[0mCPU times: user 3.48 ms, sys: 6.05 ms, total: 9.54 ms\n","Wall time: 634 ms\n"]}],"source":["%%time\n","!python --version\n","!pip show flash-attn"]},{"cell_type":"code","execution_count":9,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":1685,"status":"ok","timestamp":1720679531591,"user":{"displayName":"HUANG DONGHAO _","userId":"00977795705617022768"},"user_tz":-480},"id":"ZuS_FsLyZBrN","outputId":"2cba0105-c505-4395-afbd-2f2fee6581d0"},"outputs":[{"name":"stdout","output_type":"stream","text":["loading /Users/inflaton/code/engd/projects/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\n","MPS is 
available\n"]}],"source":["from llm_toolkit.llm_utils import *\n","from llm_toolkit.logical_reasoning_utils import *\n","\n","device = check_gpu()"]},{"cell_type":"code","execution_count":10,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["loading model: THUDM/glm-4-9b-chat-1m with adapter: None\n"]},{"name":"stderr","output_type":"stream","text":["Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"5c2efdd89c3848a99a92b1dd3597cbd1","version_major":2,"version_minor":0},"text/plain":["Loading checkpoint shards: 0%| | 0/10 [00:00<?, ?it/s]"]},"metadata":{},"output_type":"display_data"},{"name":"stdout","output_type":"stream","text":["CPU times: user 1.07 s, sys: 1.28 s, total: 2.35 s\n","Wall time: 7.08 s\n"]}],"source":["%%time\n","\n","model, tokenizer = load_model(model_name, adapter_name_or_path=adapter_name_or_path, using_llama_factory=False)"]},{"cell_type":"code","execution_count":11,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["loading train/test data files\n","DatasetDict({\n"," train: Dataset({\n"," features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n"," num_rows: 25000\n"," })\n"," test: Dataset({\n"," features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n"," num_rows: 3000\n"," })\n","})\n"]}],"source":["datasets = load_logical_reasoning_dataset(\n"," data_path,\n"," tokenizer=tokenizer,\n"," chinese_prompt=not use_english_datasets,\n"," using_p1=False,\n",")"]},{"cell_type":"code","execution_count":12,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["--------------------------------------------------\n","text: 甄加索是自杀吗\n","--------------------------------------------------\n","label: 不是\n","--------------------------------------------------\n","answer: nan\n","--------------------------------------------------\n","title: 海岸之谜\n","--------------------------------------------------\n","puzzle: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n","--------------------------------------------------\n","truth: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n","--------------------------------------------------\n","train_text: [gMASK]<sop><|system|>\n","You are an expert in logical reasoning.<|user|>\n","你是一个情景猜谜游戏的主持人。游戏规则如下:\n","\n","1. 参与者会得到一个谜面,谜面会描述一个简单又难以理解的事件。\n","2. 主持人知道谜底,谜底是谜面的答案。\n","3. 参与者可以询问任何封闭式问题来找寻事件的真相。\n","4. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。各回答的判断标准如下:\n"," - 若谜面和谜底能找到问题的答案,回答:是或者不是\n"," - 若谜面和谜底不能直接或者间接推断出问题的答案,回答:不重要\n"," - 若参与者提问不是一个封闭式问题或者问题难以理解,回答:问法错误\n"," - 若参与者提问基本还原了谜底真相,回答:回答正确\n","5. 
回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n","\n","请严格按照这些规则回答参与者提出的问题。\n","\n","**谜面:** 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n","\n","**谜底:** 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了���只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n","\n","**参与者提出的问题:** 甄加索是自杀吗\n","<|assistant|>不是<|endoftext|>\n","--------------------------------------------------\n","prompt: [gMASK]<sop><|system|>\n","You are an expert in logical reasoning.<|user|>\n","你是一个情景猜谜游戏的主持人。游戏规则如下:\n","\n","1. 参与者会得到一个谜面,谜面会描述一个简单又难以理解的事件。\n","2. 主持人知道谜底,谜底是谜面的答案。\n","3. 参与者可以询问任何封闭式问题来找寻事件的真相。\n","4. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。各回答的判断标准如下:\n"," - 若谜面和谜底能找到问题的答案,回答:是或者不是\n"," - 若谜面和谜底不能直接或者间接推断出问题的答案,回答:不重要\n"," - 若参与者提问不是一个封闭式问题或者问题难以理解,回答:问法错误\n"," - 若参与者提问基本还原了谜底真相,回答:回答正确\n","5. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n","\n","请严格按照这些规则回答参与者提出的问题。\n","\n","**谜面:** 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n","\n","**谜底:** 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n","\n","**参与者提出的问题:** 甄加索是自杀吗\n","<|assistant|>\n"]}],"source":["print_row_details(datasets[\"test\"].to_pandas())"]},{"cell_type":"code","execution_count":13,"metadata":{},"outputs":[],"source":["def evaluate_model(model, tokenizer, model_name, dataset, batch_size=1):\n"," print(f\"Evaluating model: {model_name} on {device}\")\n"," predictions = eval_model(\n"," model, tokenizer, dataset, device=device, batch_size=batch_size\n"," )\n","\n"," save_results(\n"," model_name,\n"," results_path,\n"," dataset,\n"," predictions,\n"," debug=False,\n"," )\n","\n"," metrics = calc_metrics(dataset[\"label\"], predictions, debug=False)\n"," print(metrics)"]},{"cell_type":"code","execution_count":14,"metadata":{},"outputs":[{"name":"stderr","output_type":"stream","text":["/Users/inflaton/anaconda3/envs/logical-reasoning/lib/python3.11/site-packages/transformers/generation/utils.py:1513: UserWarning: The operator 'aten::isin.Tensor_Tensor_out' is not currently supported on the MPS backend and will fall back to run on the CPU. This may have performance implications. 
(Triggered internally at /Users/runner/work/pytorch/pytorch/pytorch/aten/src/ATen/mps/MPSFallback.mm:13.)\n"," if eos_token_id is not None and torch.isin(elements=eos_token_id, test_elements=pad_token_id).any():\n"]},{"name":"stdout","output_type":"stream","text":["\n","是\n","CPU times: user 888 ms, sys: 312 ms, total: 1.2 s\n","Wall time: 2.29 s\n"]}],"source":["%%time\n","\n","prompt1 = datasets[\"test\"][\"prompt\"][1000]\n","\n","gen_kwargs = {\"max_length\": 4096, \"do_sample\": True, \"top_k\": 1}\n","with torch.no_grad():\n"," inputs = tokenizer(\n"," [prompt1],\n"," return_tensors=\"pt\",\n"," ).to(device)\n"," outputs = model.generate(**inputs, **gen_kwargs)\n"," outputs = outputs[:, inputs['input_ids'].shape[1]:]\n"," print(tokenizer.decode(outputs[0], skip_special_tokens=True))"]},{"cell_type":"code","execution_count":15,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["Evaluating model: THUDM/glm-4-9b-chat-1m on mps\n"]},{"name":"stderr","output_type":"stream","text":[" 1%| | 1/188 [01:42<5:20:47, 102.93s/it]"]},{"name":"stdout","output_type":"stream","text":["--------\n","step 1: \n","不是\n","--------\n","step 2: \n","不是\n","--------\n","step 3: \n","不是\n","--------\n","step 4: 不是\n","--------\n","step 5: 不是\n"]},{"name":"stderr","output_type":"stream","text":[" 7%|▋ | 14/188 [1:16:01<23:24:19, 484.25s/it]"]}],"source":["%%time\n","\n","evaluate_model(model, tokenizer, f\"{model_name}{'_' + adapter_name_or_path if adapter_name_or_path else ''}\", datasets[\"test\"], batch_size=16)"]}],"metadata":{"accelerator":"GPU","application/vnd.databricks.v1+notebook":{"dashboards":[],"environmentMetadata":null,"language":"python","notebookMetadata":{"mostRecentlyExecutedCommandWithImplicitDF":{"commandId":-1,"dataframes":["_sqldf"]},"pythonIndentUnit":4},"notebookName":"10_eval-lf-medium-py3.11","widgets":{}},"colab":{"gpuType":"L4","provenance":[]},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.11.9"}},"nbformat":4,"nbformat_minor":0}
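In short, the updated notebook now completes the GLM-4-9B-Chat-1M run end to end: the checkpoint shards load from the local cache instead of failing mid-download, the mgtv train/test datasets (25,000/3,000 rows) are loaded and inspected, a single-prompt smoke test is generated, and the batched evaluation is kicked off with batch_size=16. A condensed sketch of the notebook's flow, reconstructed from its own cells (all helpers and environment variables are the ones the notebook imports; values in comments are the ones printed in the run above):

import os
from llm_toolkit.llm_utils import *                 # load_model, eval_model, check_gpu, save_results, ...
from llm_toolkit.logical_reasoning_utils import *   # load_logical_reasoning_dataset, calc_metrics, ...

device = check_gpu()                                 # "MPS is available" in the run above

model_name = os.getenv("MODEL_NAME")                           # THUDM/glm-4-9b-chat-1m
adapter_name_or_path = os.getenv("ADAPTER_NAME_OR_PATH")       # None (base model)
data_path = os.getenv("LOGICAL_REASONING_DATA_PATH")           # datasets/mgtv
results_path = os.getenv("LOGICAL_REASONING_RESULTS_PATH")     # results/mgtv-results_m3.csv

model, tokenizer = load_model(
    model_name, adapter_name_or_path=adapter_name_or_path, using_llama_factory=False
)
datasets = load_logical_reasoning_dataset(
    data_path, tokenizer=tokenizer, chinese_prompt=True, using_p1=False
)

def evaluate_model(model, tokenizer, model_name, dataset, batch_size=1):
    print(f"Evaluating model: {model_name} on {device}")
    predictions = eval_model(model, tokenizer, dataset, device=device, batch_size=batch_size)
    save_results(model_name, results_path, dataset, predictions, debug=False)
    print(calc_metrics(dataset["label"], predictions, debug=False))

evaluate_model(model, tokenizer, model_name, datasets["test"], batch_size=16)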
llm_toolkit/llm_utils.py
CHANGED
@@ -121,7 +121,7 @@ def extract_answer(text, debug=False):
     if debug:
         print("--------\nstep 3:", text)

-    text = text.split("
+    text = text.split("。")[0].strip()
     if debug:
         print("--------\nstep 4:", text)

@@ -137,9 +137,6 @@ def extract_answer(text, debug=False):
     return text


-from tqdm import tqdm
-
-
 def eval_model(
     model,
     tokenizer,
@@ -147,34 +144,39 @@ def eval_model(
     device="cuda",
     max_new_tokens=4096,
     repetition_penalty=1.0,
-    batch_size=1,
+    batch_size=1,
 ):
     total = len(eval_dataset)
     predictions = []
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+    model.eval()
+
+    with torch.no_grad():
+        for i in tqdm(range(0, total, batch_size)):  # Iterate in batches
+            batch_end = min(i + batch_size, total)  # Ensure not to exceed dataset
+            batch_prompts = eval_dataset["prompt"][i:batch_end]
+            inputs = tokenizer(
+                batch_prompts,
+                return_tensors="pt",
+                padding=True,  # Ensure all inputs in the batch have the same length
+            ).to(device)
+
+            outputs = model.generate(
+                **inputs,
+                max_new_tokens=max_new_tokens,
+                repetition_penalty=repetition_penalty,
+                use_cache=False,
+            )
+            outputs = outputs[:, inputs["input_ids"].shape[1] :]
+            decoded_output = tokenizer.batch_decode(
+                outputs, skip_special_tokens=True
+            )  # Skip special tokens for clean output
+            debug = i == 0
+            decoded_output = [
+                extract_answer(output, debug=debug and j == 0)
+                for j, output in enumerate(decoded_output)
+            ]
+            predictions.extend(decoded_output)

     return predictions

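The new eval_model body above is the core of the commit: it switches from one-prompt-at-a-time generation to padded batches, strips the prompt tokens from each generated sequence, and funnels every decoded output through extract_answer (whose step 4 now cuts the answer at the first Chinese full stop 「。」). Below is a minimal, self-contained sketch of that batching pattern; the "gpt2" model id, the toy prompts, and the left-padding setting are illustrative assumptions, not part of llm_toolkit.

# Standalone sketch of the batched-generation pattern the new eval_model uses.
# Assumptions: "gpt2" and the toy prompts are placeholders; llm_toolkit applies the
# same steps to its dataset["prompt"] column and its own extract_answer() helper.
import torch
from tqdm import tqdm
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "gpt2"                             # stand-in; the notebook evaluates THUDM/glm-4-9b-chat-1m
tokenizer = AutoTokenizer.from_pretrained(model_id)
tokenizer.pad_token = tokenizer.eos_token     # gpt2 has no pad token; reuse EOS for padding
tokenizer.padding_side = "left"               # left-pad so generated tokens directly follow each prompt

model = AutoModelForCausalLM.from_pretrained(model_id)
model.eval()

prompts = ["Question: is water wet? Answer:", "Question: can fish fly? Answer:"]
batch_size = 2
predictions = []

with torch.no_grad():
    for i in tqdm(range(0, len(prompts), batch_size)):           # iterate in batches
        batch_prompts = prompts[i : i + batch_size]
        inputs = tokenizer(batch_prompts, return_tensors="pt", padding=True)
        outputs = model.generate(
            **inputs,
            max_new_tokens=16,
            repetition_penalty=1.0,
            pad_token_id=tokenizer.pad_token_id,                 # silence gpt2's missing-pad warning
        )
        outputs = outputs[:, inputs["input_ids"].shape[1]:]      # keep only the newly generated tokens
        decoded = tokenizer.batch_decode(outputs, skip_special_tokens=True)
        # extract_answer-style post-processing: keep text up to the first Chinese full stop
        predictions.extend(text.split("。")[0].strip() for text in decoded)

print(predictions)

Left padding matters in this sketch: with right padding, a decoder-only model would continue generating after the pad tokens of the shorter prompts rather than after the prompt text itself.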
results/mgtv-llama3_p1_full_metrics.csv
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5b4210453d59ad49c11feef76cf64cee627fe0dfecdf399cd444e8c99b6647cc
+size 986
results/mgtv-llama3_p2_full_metrics.csv
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f411f819fe8137dc75d5addb5b1d6e5a2244ceb0bfe87554e3b82d42dcfb4a17
+size 977