{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "True" ] }, "execution_count": 1, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import torch\n", "import torch.nn as nn\n", "torch.cuda.is_available()" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "Check that we can load the tokenizer and the model. The first time this runs it will take a while. The files go under ~/.cache/huggingface" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/bitsandbytes/cuda_setup/main.py:136: UserWarning: /opt/conda did not contain libcudart.so as expected! Searching further paths...\n", " warn(msg)\n", "The tokenizer class you load from this checkpoint is not the same type as the class this function is called from. It may result in unexpected tokenization. \n", "The tokenizer class you load from this checkpoint is 'LLaMATokenizer'. \n", "The class this function is called from is 'LlamaTokenizer'.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\n", "===================================BUG REPORT===================================\n", "Welcome to bitsandbytes. For bug reports, please submit your error trace to: https://github.com/TimDettmers/bitsandbytes/issues\n", "================================================================================\n", "CUDA SETUP: CUDA runtime path found: /usr/local/cuda/lib64/libcudart.so\n", "CUDA SETUP: Highest compute capability among GPUs detected: 7.5\n", "CUDA SETUP: Detected CUDA version 113\n", "CUDA SETUP: Loading binary /home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/bitsandbytes/libbitsandbytes_cuda113.so...\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "5616e727844a4f0b9efaff97aa2f9d75", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Loading checkpoint shards: 0%| | 0/33 [00:00, ?it/s]" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from transformers import LlamaTokenizer, LlamaForCausalLM\n", "from peft import prepare_model_for_int8_training\n", "tokenizer = LlamaTokenizer.from_pretrained(\n", " \"decapoda-research/llama-7b-hf\")\n", " \n", "#tokenizer.pad_token_id = (0)\n", "#tokenizer.padding_side = 'left'\n", "model = LlamaForCausalLM.from_pretrained(\n", " \"decapoda-research/llama-7b-hf\",\n", " load_in_8bit=True,\n", " device_map=\"auto\",\n", " torch_dtype=torch.float16\n", ")\n", "\n", "model = prepare_model_for_int8_training(model)\n" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "Construct a basic example to interact with the model" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/matt/hf/sqllama-V0/.venv/lib/python3.7/site-packages/transformers/generation/utils.py:1220: UserWarning: You have modified the pretrained model configuration to control generation. This is a deprecated strategy to control generation and will be removed soon, in a future version. Please use a generation configuration file (see https://huggingface.co/docs/transformers/main_classes/text_generation)\n", " \"You have modified the pretrained model configuration to control generation. 
This is a\"" ] }, { "name": "stdout", "output_type": "stream", "text": [ " ⁇ hey dude, talk to me.\n", "I'm a 20 year old guy from the UK. I'm a bit of a nerd, I like to read, I like to write, I like to play video games, I like to watch movies, I like to listen\n", " ⁇ whats the capital of georgia?\n", "What is the capital of Georgia?\n", "The capital of Georgia is Atlanta.\n", "What is the capital of Georgia?\n", "The capital of Georgia is Atlanta. The capital of Georgia is Atlanta. The capital of Georgia is Atlanta. The capital of Georgia is Atlanta. The\n" ] } ], "source": [ "def ask(q,l=64):\n", " toks = tokenizer(q, return_tensors='pt')\n", " ctoks = toks.input_ids.to('cuda')\n", " gen = model.generate(ctoks, max_length=l)\n", " return tokenizer.decode(gen[0])\n", "\n", "r = ask('hey dude, talk to me')\n", "print(r)\n", "\n", "r = ask('whats the capital of georgia?')\n", "print(r)\n" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "Clone the WikiSQL project so we can use its dataset." ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "fatal: destination path 'WikiSQL' already exists and is not an empty directory.\n", "data/\n", "data/train.jsonl\n", "data/test.tables.jsonl\n", "data/test.db\n", "data/dev.tables.jsonl\n", "data/dev.db\n", "data/test.jsonl\n", "data/train.tables.jsonl\n", "data/train.db\n", "data/dev.jsonl\n" ] } ], "source": [ "! git clone https://github.com/salesforce/WikiSQL\n", "! tar xvjf WikiSQL/data.tar.bz2" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "Figure out what the actual dataset has in it." ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "{'phase': 1, 'table_id': '1-1000181-1', 'question': 'Tell me what the notes are for South Australia ', 'sql': {'sel': 5, 'conds': [[3, 0, 'SOUTH AUSTRALIA']], 'agg': 0}}\n", "\n", "1-1000181-1\n", "['State/territory', 'Text/background colour', 'Format', 'Current slogan', 'Current series', 'Notes']\n", "\n", "{'id': '1-1000181-1', 'header': ['State/territory', 'Text/background colour', 'Format', 'Current slogan', 'Current series', 'Notes'], 'types': ['text', 'text', 'text', 'text', 'text', 'text'], 'rows': [['Australian Capital Territory', 'blue/white', 'Yaa·nna', 'ACT · CELEBRATION OF A CENTURY 2013', 'YIL·00A', 'Slogan screenprinted on plate'], ['New South Wales', 'black/yellow', 'aa·nn·aa', 'NEW SOUTH WALES', 'BX·99·HI', 'No slogan on current series'], ['New South Wales', 'black/white', 'aaa·nna', 'NSW', 'CPX·12A', 'Optional white slimline series'], ['Northern Territory', 'ochre/white', 'Ca·nn·aa', 'NT · OUTBACK AUSTRALIA', 'CB·06·ZZ', 'New series began in June 2011'], ['Queensland', 'maroon/white', 'nnn·aaa', 'QUEENSLAND · SUNSHINE STATE', '999·TLG', 'Slogan embossed on plate'], ['South Australia', 'black/white', 'Snnn·aaa', 'SOUTH AUSTRALIA', 'S000·AZD', 'No slogan on current series'], ['Victoria', 'blue/white', 'aaa·nnn', 'VICTORIA - THE PLACE TO BE', 'ZZZ·562', 'Current series will be exhausted this year']], 'name': 'table_1000181_1'}\n", "SELECT col5 FROM table WHERE col3 = SOUTH AUSTRALIA\n", "SELECT Notes FROM table WHERE Current slogan = SOUTH AUSTRALIA\n" ] } ], "source": [ "import json\n", "import sys\n", "import re\n", "sys.path.append('./WikiSQL')\n", "from WikiSQL.lib.query import Query\n", "\n", "def replace_cols(qs,cmap):\n", " for k,v in cmap.items():\n", " qs = 
re.sub(f' {k} ',f' {v} ',qs)\n", " return qs\n", "\n", "with open('data/train.jsonl') as f:\n", " j = f.readline()\n", " js = json.loads(j)\n", " print(js)\n", " \n", "print()\n", "\n", "with open('data/train.tables.jsonl') as f:\n", " j = f.readline()\n", " js2 = json.loads(j)\n", " #print(js)\n", " print(js2['id'])\n", " print(js2['header'])\n", " print()\n", " print(js2)\n", "\n", "sql = js['sql']\n", "q = Query.from_dict(sql)\n", "print(q)\n", "cm = {f'col{i}':js2['header'][i] for i in range(len(js2['header']))}\n", "qs = replace_cols(str(q),cm)\n", "print(qs)" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "Ok, their query class deals poorly with stringifying the constraints. Let's mock up natural language prompt and SQL response." ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n", "### Question: How many games does novica veličković have when there's more than 24 rebounds?\n", "### Input: Table 2-16050349-8 has columns Rank (real),Name (text),Team (text),Games (real),Rebounds (real). \n", "### Answer: SELECT COUNT Games FROM 2-16050349-8 WHERE Name = 'novica veličković' AND Rebounds > 24\n", "\n", "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n", "### Question: What is the number of capacity at somerset park?\n", "### Input: Table 1-11206787-5 has columns Team (text),Stadium (text),Capacity (real),Highest (real),Lowest (real),Average (real). \n", "### Answer: SELECT COUNT Capacity FROM 1-11206787-5 WHERE Stadium = 'Somerset Park'\n", "\n", "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n", "### Question: What is the number & name with an Undergoing overhaul, restoration or repairs date?\n", "### Input: Table 2-11913905-6 has columns Number & Name (text),Description (text),Livery (text),Owner(s) (text),Date (text). \n", "### Answer: SELECT Number & Name FROM 2-11913905-6 WHERE Date = 'undergoing overhaul, restoration or repairs'\n", "\n", "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n", "### Question: What year did Orlando have a School/Club team in Clemson?\n", "### Input: Table 2-15621965-7 has columns Player (text),Nationality (text),Position (text),Years in Orlando (text),School/Club Team (text). \n", "### Answer: SELECT Years in Orlando FROM 2-15621965-7 WHERE School/Club Team = 'clemson'\n", "\n", "Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.\n", "### Question: How many Deaths have a Fate of damaged, and a Tonnage (GRT) smaller than 4,917?\n", "### Input: Table 2-18914307-1 has columns Date (text),Ship Name (text),Flag (text),Tonnage ( GRT ) (real),Fate (text),Deaths (real). 
\n", "### Answer: SELECT COUNT Deaths FROM 2-18914307-1 WHERE Fate = 'damaged' AND Tonnage ( GRT ) < 4,917\n" ] } ], "source": [ "import random\n", "import json\n", "\n", "# defined by WikiSQL\n", "\n", "agg_ops = ['', 'MAX', 'MIN', 'COUNT', 'SUM', 'AVG']\n", "cond_ops = ['=', '>', '<', 'OP']\n", "syms = ['SELECT', 'WHERE', 'AND', 'COL', 'TABLE', 'CAPTION', 'PAGE', 'SECTION', 'OP', 'COND', 'QUESTION', 'AGG', 'AGGOPS', 'CONDOPS']\n", "\n", "def fix_repr(d,cols,types,tid):\n", " sel_index=d['sel'] \n", " agg_index=d['agg']\n", " conditions=d['conds']\n", " col = cols[sel_index]\n", " rep = 'SELECT {agg} {sel} FROM {tid}'.format(\n", " agg=agg_ops[agg_index],\n", " sel=col,\n", " tid=tid\n", " )\n", " if conditions:\n", " cs = []\n", " for i, o, v in conditions:\n", " #print(i,cols)\n", " nm = cols[i]\n", " op = cond_ops[o]\n", " \n", " if types[i] in ['text']:\n", " val = f\"\\'{v}\\'\"\n", " else:\n", " val = v\n", " cs.append(f'{nm} {op} {val}')\n", " #print(cs)\n", "\n", " rep += ' WHERE ' + ' AND '.join(cs)\n", " \n", " return rep\n", "\n", "tbl_cols = {}\n", "tbl_types = {}\n", "tbl_str = {}\n", "\n", "prefix = 'Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.'\n", "\n", "def tbl_def_to_string(id, header, types):\n", " ht = [f'{header[i]} ({types[i]})' for i in range(len(header))]\n", " s = f'\\n### Input: Table {id} has columns ' + ','.join(ht) + '. '\n", " return s\n", "\n", "with open('data/train.tables.jsonl') as f:\n", " for line in f:\n", " js = json.loads(line)\n", " id = js['id']\n", " hdr = js['header']\n", " ts = js['types']\n", " tbl_str[id] = tbl_def_to_string(id,hdr,ts)\n", " tbl_cols[id] = hdr\n", " tbl_types[id] = ts\n", "\n", "\n", "nl_q = []\n", "sql_a = []\n", "\n", "with open('data/train.jsonl') as f:\n", " for line in f:\n", " js = json.loads(line)\n", " id = js['table_id']\n", " s = tbl_str[id]\n", " qst = js['question']\n", " nl = prefix + \"\\n### Question: \" + qst + s\n", " nl_q.append(nl)\n", "\n", " sql = js['sql']\n", " a = fix_repr(sql,tbl_cols[id],tbl_types[id],id)\n", " a = '\\n### Answer: ' + a\n", " sql_a.append(a)\n", "\n", "\n", "M = len(nl_q)\n", "\n", "data_txt = [nl_q[i] + sql_a[i] for i in range(len(nl_q))]\n", "\n", "for i in range(5):\n", " j = random.randint(0,M-1)\n", " print()\n", " print(data_txt[j]) \n", " \n", " " ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "Set up the details for the model." ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "431b36f60a3940cf8646e1bea4324745", "version_major": 2, "version_minor": 0 }, "text/plain": [ "Map: 0%| | 0/56355 [00:00, ? 
examples/s]" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from peft import LoraConfig, get_peft_model\n", "import transformers\n", "import datasets\n", "\n", "LORA_R = 4\n", "LORA_ALPHA = 16\n", "LORA_DROPOUT = .1\n", "CUTOFF_LEN = 256\n", "BATCH = 128\n", "MICRO_BATCH = 4\n", "N_GAS = BATCH//MICRO_BATCH\n", "EPOCHS = 1\n", "LR = 1e-5\n", "\n", "lora_cfg = LoraConfig(\n", " r = LORA_R,\n", " lora_alpha=LORA_ALPHA,\n", " lora_dropout=LORA_DROPOUT,\n", " task_type='CASUAL_LM',\n", " target_modules=['q_proj','v_proj']\n", ")\n", "\n", "model = get_peft_model(model,lora_cfg)\n", "\n", "tokenizer.pad_token_id = 0\n", "\n", "d = {'prompt': data_txt}\n", "\n", "data = datasets.Dataset.from_dict(d)\n", "data = data.map(lambda x:\n", " tokenizer(\n", " x['prompt'],\n", " truncation=True,\n", " max_length=CUTOFF_LEN,\n", " padding=\"max_length\"\n", " ))\n", "\n", "data = data.remove_columns('prompt')\n", "\n", "targs = transformers.TrainingArguments(\n", " per_device_train_batch_size=MICRO_BATCH,\n", " gradient_accumulation_steps=N_GAS,\n", " warmup_steps=0,\n", " num_train_epochs=EPOCHS,\n", " learning_rate=LR,\n", " fp16=True,\n", " logging_steps=1,\n", " output_dir='sqllama-out',\n", " save_total_limit=3,\n", " remove_unused_columns=False\n", ")\n", "\n", "\n", "model.config.use_cache = False" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "ignore - just trying to figure out huggingface datasets" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Dataset({\n", " features: ['input_ids', 'attention_mask'],\n", " num_rows: 56355\n", "})\n", "{'input_ids': [0, 13866, 338, 263, 1139, 393, 16612, 263, 848, 2009, 29892, 3300, 2859, 411, 385, 1881, 393, 16612, 263, 3758, 1591, 29889, 29871, 14350, 263, 3758, 2346, 393, 5663, 17180, 278, 848, 29889, 13, 2277, 29937, 894, 29901, 24948, 592, 825, 278, 11486, 526, 363, 4275, 8314, 29871, 13, 2277, 29937, 10567, 29901, 6137, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 756, 4341, 4306, 29914, 357, 768, 706, 313, 726, 511, 1626, 29914, 7042, 12384, 313, 726, 511, 5809, 313, 726, 511, 7583, 269, 1188, 273, 313, 726, 511, 7583, 3652, 313, 726, 511, 3664, 267, 313, 726, 467, 259, 13, 2277, 29937, 673, 29901, 5097, 29871, 8695, 3895, 29871, 29896, 29899, 29896, 29900, 29900, 29900, 29896, 29947, 29896, 29899, 29896, 5754, 9626, 269, 1188, 273, 353, 525, 6156, 2692, 29950, 319, 29965, 10810, 1964, 10764, 29915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}\n" ] } ], "source": [ "print(data)\n", "print(data[0])\n", "\n", "#from datasets import load_dataset\n", "\n", "\n", "#!git clone https://github.com/tloen/alpaca-lora.git\n", "#dalp = load_dataset(\"json\", data_files=\"alpaca-lora/alpaca_data.json\")\n", "#print(dalp)\n", "\n", "#dalp = dalp.map(lambda x : {'blah':'blah'})\n", "#print(dalp)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/html": [ "\n", "
<p>Training loss, logged every step (every 25th step and the final step shown):</p>\n",
"<table border=\"1\" class=\"dataframe\">\n",
"  <thead>\n",
"    <tr>\n",
"      <th>Step</th>\n",
"      <th>Training Loss</th>\n",
"    </tr>\n",
"  </thead>\n",
"  <tbody>\n",
"    <tr><td>1</td><td>2.517200</td></tr>\n",
"    <tr><td>25</td><td>2.354900</td></tr>\n",
"    <tr><td>50</td><td>2.274500</td></tr>\n",
"    <tr><td>75</td><td>2.118000</td></tr>\n",
"    <tr><td>100</td><td>1.963900</td></tr>\n",
"    <tr><td>125</td><td>1.806900</td></tr>\n",
"    <tr><td>150</td><td>1.669900</td></tr>\n",
"    <tr><td>175</td><td>1.581500</td></tr>\n",
"    <tr><td>200</td><td>1.412800</td></tr>\n",
"    <tr><td>225</td><td>1.271700</td></tr>\n",
"    <tr><td>250</td><td>1.207700</td></tr>\n",
"    <tr><td>275</td><td>1.147700</td></tr>\n",
"    <tr><td>300</td><td>1.090300</td></tr>\n",
"    <tr><td>325</td><td>1.035200</td></tr>\n",
"    <tr><td>350</td><td>1.012700</td></tr>\n",
"    <tr><td>375</td><td>0.995800</td></tr>\n",
"    <tr><td>400</td><td>0.979700</td></tr>\n",
"    <tr><td>425</td><td>0.953700</td></tr>\n",
"    <tr><td>440</td><td>0.965100</td></tr>\n",
"  </tbody>\n",
"</table>"
],
"text/plain": [
"