From aba7ed2eb1fce4ebbca28eeed11ab19687cb1764 Mon Sep 17 00:00:00 2001 From: Sebastian Raschka Date: Sat, 15 Jun 2024 15:10:01 -0500 Subject: [PATCH] Updated ch07 (#213) * Updated ch07 * fix links * check links --- .github/workflows/check-links.yml | 2 +- ch07/01_main-chapter-code/ch07.ipynb | 1454 +++++++++++++---- .../instruction-data-with-response.json | 662 ++++++++ .../llm-instruction-eval-ollama.ipynb | 20 +- 4 files changed, 1793 insertions(+), 345 deletions(-) create mode 100644 ch07/01_main-chapter-code/instruction-data-with-response.json diff --git a/.github/workflows/check-links.yml b/.github/workflows/check-links.yml index b98e036..9d987cf 100644 --- a/.github/workflows/check-links.yml +++ b/.github/workflows/check-links.yml @@ -27,4 +27,4 @@ jobs: - name: Check links run: | - pytest --check-links ./ --check-links-ignore "https://platform.openai.com/*" \ No newline at end of file + pytest --check-links ./ --check-links-ignore "https://platform.openai.com/*" --check-links-ignore "https://arena.lmsys.org" \ No newline at end of file diff --git a/ch07/01_main-chapter-code/ch07.ipynb b/ch07/01_main-chapter-code/ch07.ipynb index 110d916..afd8637 100644 --- a/ch07/01_main-chapter-code/ch07.ipynb +++ b/ch07/01_main-chapter-code/ch07.ipynb @@ -41,7 +41,7 @@ "base_uri": "https://localhost:8080/" }, "id": "4e19327b-6c02-4881-ad02-9b6d3ec0b1b4", - "outputId": "6560a9ce-8cbe-4c37-885b-e9c8c1946f69" + "outputId": "5e54624b-a877-48c1-833e-1533ea0677db" }, "outputs": [ { @@ -77,7 +77,7 @@ "id": "264fca98-2f9a-4193-b435-2abfa3b4142f" }, "source": [ - "[figure]" + "" ] }, { @@ -99,7 +99,7 @@ "source": [ "- In chapter 5, we saw that pretraining an LLM involves a training procedure where it learns to generate one word at a time\n", "- Hence, a pretrained LLM is good at text completion, but it is not good at following instructions\n", - "- In this chapter, we teach the LLM to better follow instructions" + "- In this chapter, we teach the LLM to follow instructions better" ] }, { @@ -109,7 +109,7 @@ "id": "18dc0535-0904-44ed-beaf-9b678292ef35" }, "source": [ - "[figure]" + "" ] }, { @@ -119,11 +119,9 @@ "id": "b4698b23-12e0-4bd7-a140-ccb3dd71d4e8" }, "source": [ - "- An optional step after instruction finetuning is preference tuning, which refines the response style of an LLM; readers interested in preference tuning can find example code in the bonus materials: [../04_preference-tuning-with-dpo](../04_preference-tuning-with-dpo)\n", - "\n", "- The topics covered in this chapter are summarized in the figure below\n", "\n", - "[figure]" + "" ] }, { @@ -155,7 +153,7 @@ "base_uri": "https://localhost:8080/" }, "id": "0G3axLw6kY1N", - "outputId": "c48ade8c-0d31-4efb-8246-6e6c51669dde" + "outputId": "f8037e64-eced-4e21-d104-b34d432215bf" }, "outputs": [ { @@ -203,7 +201,7 @@ "id": "d7af8176-4255-4e92-8c7d-998771733eb8" }, "source": [ - "- Each item in the `data` list we loaded from the JSON file above is a dictionary in the following form:" + "- Each item in the `data` list we loaded from the JSON file above is a dictionary in the following form" ] }, { @@ -215,7 +213,7 @@ "base_uri": "https://localhost:8080/" }, "id": "-LiuBMsHkzQV", - "outputId": "88fe5be1-da18-45b5-dbb5-abcbcc4558e5" + "outputId": "ea9e812f-d7ef-49ec-aca0-15fe11594609" }, "outputs": [ { @@ -251,7 +249,7 @@ "base_uri": "https://localhost:8080/" }, "id": "uFInFxDDk2Je", - "outputId": "a07ca278-0205-4ac4-b81e-54a513ece585" + "outputId": "e8caef4a-8b44-4c4e-96da-19b27eaf3e48" }, "outputs": [ { @@ -286,7 +284,7 @@ "id": 
"dffa4f70-44d4-4be4-89a9-2159f4885b10" }, "source": [ - "[figure]" + "" ] }, { @@ -297,7 +295,7 @@ }, "source": [ "- In this chapter, we use Alpaca-style prompt formatting, which was the original prompt template for instruction finetuning\n", - "- Below we format the input that we will pass as input to the LLM" + "- Below, we format the input that we will pass as input to the LLM" ] }, { @@ -340,7 +338,7 @@ "base_uri": "https://localhost:8080/" }, "id": "F9UQRfjzo4Js", - "outputId": "f05669d2-13a8-4eb3-f549-dab83cec1e00" + "outputId": "ceae9231-24a9-4f33-8c1e-0e6842bd3064" }, "outputs": [ { @@ -374,7 +372,7 @@ "id": "4dc93ddf-431c-49c0-96f2-fb3a79c4d94c" }, "source": [ - "- Below is a formatted response without input field" + "- Below is a formatted response without an input field" ] }, { @@ -386,7 +384,7 @@ "base_uri": "https://localhost:8080/" }, "id": "a3891fa9-f738-41cd-946c-80ef9a99c346", - "outputId": "b9550b1f-8b35-4b00-96d3-a1ce2b76daee" + "outputId": "f6439a50-1b0e-49ea-ecad-442a688121c7" }, "outputs": [ { @@ -447,7 +445,7 @@ "base_uri": "https://localhost:8080/" }, "id": "-zf6oht6bIUQ", - "outputId": "5a11a57f-2ce2-408f-e05a-a09cb661e49b" + "outputId": "107dd9b9-03cb-405d-f758-a7e42823bebc" }, "outputs": [ { @@ -473,7 +471,7 @@ "id": "fcaaf606-f913-4445-8301-632ae10d387d" }, "source": [ - "## 7.3 Creating data loaders for an instruction dataset" + "## 7.3 Organizing data into training batches" ] }, { @@ -483,7 +481,19 @@ "id": "233f63bd-9755-4d07-8884-5e2e5345cf27" }, "source": [ - "[figure]" + "" + ] + }, + { + "cell_type": "markdown", + "id": "c149fc1a-7757-4ec8-80cb-e2a3fb007a2c", + "metadata": { + "id": "c149fc1a-7757-4ec8-80cb-e2a3fb007a2c" + }, + "source": [ + "- We tackle this dataset batching in several steps, as summarized in the figure below\n", + "\n", + "" ] }, { @@ -495,7 +505,7 @@ "source": [ "- First, we implement an `InstructionDataset` class that pre-tokenizes all inputs in the dataset, similar to the `SpamDataset` in chapter 6\n", "\n", - "[figure]" + "" ] }, { @@ -546,6 +556,17 @@ " return len(self.data)" ] }, + { + "cell_type": "markdown", + "id": "384f0e69-4b22-41c0-a25d-f077527eddd1", + "metadata": { + "id": "384f0e69-4b22-41c0-a25d-f077527eddd1" + }, + "source": [ + "- Similar to chapter 6, we want to collect multiple training examples in a batch to accelerate training; this requires padding all inputs to a similar length\n", + "- Also similar to the previous chapter, we use the `<|endoftext|>` token as a padding token" + ] + }, { "cell_type": "code", "execution_count": 12, @@ -555,7 +576,7 @@ "base_uri": "https://localhost:8080/" }, "id": "ff24fe1a-5746-461c-ad3d-b6d84a1a7c96", - "outputId": "7459dd6d-aaad-49c5-9c82-db9b50358c77" + "outputId": "79dd4d77-00fc-4072-9582-cd1218fd37f0" }, "outputs": [ { @@ -570,110 +591,236 @@ "print(tokenizer.encode(\"<|endoftext|>\", allowed_special={\"<|endoftext|>\"}))" ] }, + { + "cell_type": "markdown", + "id": "9e5bd7bc-f347-4cf8-a0c2-94cb8799e427", + "metadata": { + "id": "9e5bd7bc-f347-4cf8-a0c2-94cb8799e427" + }, + "source": [ + "- In chapter 6, we padded all examples in a dataset to the same length\n", + " - Here, we take a more sophisticated approach and develop a custom \"collate\" function that we can pass to the data loader\n", + " - This custom collate function pads the training examples in each batch to have the same length (but different batches can have different lengths)" + ] + }, + { + "cell_type": "markdown", + "id": "65c4d943-4aa8-4a44-874e-05bc6831fbd3", + "metadata": { + "id": 
"65c4d943-4aa8-4a44-874e-05bc6831fbd3" + }, + "source": [ + "" + ] + }, { "cell_type": "code", "execution_count": 13, - "id": "W2jvh-OP9MFV", + "id": "eb4c77dd-c956-4a1b-897b-b466909f18ca", "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "W2jvh-OP9MFV", - "outputId": "b3f94569-8997-461b-909e-b469e0b3c089" + "id": "eb4c77dd-c956-4a1b-897b-b466909f18ca" }, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor(1.1269)" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "# Explain index masking\n", + "def custom_collate_draft_1(\n", + " batch,\n", + " pad_token_id=50256,\n", + " device=\"cpu\"\n", + "):\n", + " # Find the longest sequence in the batch\n", + " batch_max_length = max(len(item)+1 for item in batch)\n", "\n", - "targets = torch.tensor([0, 1])\n", - "inputs = torch.tensor(\n", - " [[-1., 1.],\n", - " [-0.5, 1.5]]\n", - ")\n", + " # Pad and prepare inputs\n", + " inputs_lst = []\n", "\n", - "torch.nn.functional.cross_entropy(inputs, targets)" + " for item in batch:\n", + " new_item = item.copy()\n", + " # Add an <|endoftext|> token\n", + " new_item += [pad_token_id]\n", + " # Pad sequences to max_length\n", + " # this always adds at least 1 additional padding tokens\n", + " padded = new_item + [pad_token_id] * (batch_max_length - len(new_item))\n", + " # We remove this extra padded token again here\n", + " inputs = torch.tensor(padded[:-1])\n", + " inputs_lst.append(inputs)\n", + "\n", + " # Convert list of inputs to tensor and transfer to target device\n", + " inputs_tensor = torch.stack(inputs_lst).to(device)\n", + " return inputs_tensor" ] }, { "cell_type": "code", "execution_count": 14, - "id": "nvVMuil89v9N", + "id": "8fb02373-59b3-4f3a-b1d1-8181a2432645", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, - "id": "nvVMuil89v9N", - "outputId": "5d9f0948-ddc2-4766-c2ba-c14ca550e9d1" + "id": "8fb02373-59b3-4f3a-b1d1-8181a2432645", + "outputId": "a0fa921e-f3f5-4842-b33c-d9ddf021977b" }, "outputs": [ { - "data": { - "text/plain": [ - "tensor(0.7936)" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[ 0, 1, 2, 3, 4],\n", + " [ 5, 6, 50256, 50256, 50256],\n", + " [ 7, 8, 9, 50256, 50256]])\n" + ] } ], "source": [ - "targets = torch.tensor([0, 1, 1])\n", - "inputs = torch.tensor(\n", - " [[-1., 1.],\n", - " [-0.5, 1.5],\n", - " [-0.5, 1.5]]\n", + "inputs_1 = [0, 1, 2, 3, 4]\n", + "inputs_2 = [5, 6]\n", + "inputs_3 = [7, 8, 9]\n", + "\n", + "batch = (\n", + " inputs_1,\n", + " inputs_2,\n", + " inputs_3\n", ")\n", - "torch.nn.functional.cross_entropy(inputs, targets)" + "\n", + "print(custom_collate_draft_1(batch))" + ] + }, + { + "cell_type": "markdown", + "id": "c46832ab-39b7-45f8-b330-ac9adfa10d1b", + "metadata": { + "id": "c46832ab-39b7-45f8-b330-ac9adfa10d1b" + }, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "id": "17769a19-b961-4213-92ef-34f441b2d1d6", + "metadata": { + "id": "17769a19-b961-4213-92ef-34f441b2d1d6" + }, + "source": [ + "- Above, we only returned the inputs to the LLM; however, for LLM training, we also need the target values\n", + "- Similar to pretraining an LLM, the targets are the inputs shifted by 1 position to the right, so the LLM learns to predict the next token" + ] + }, + { + "cell_type": "markdown", + "id": "0386b6fe-3455-4e70-becd-a5a4681ba2ef", + "metadata": { + "id": "0386b6fe-3455-4e70-becd-a5a4681ba2ef" 
+ }, + "source": [ + "" ] }, { "cell_type": "code", "execution_count": 15, - "id": "RTyB1vah9p56", + "id": "74af192e-757c-4c0a-bdf9-b7eb25bf6ebc", "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "RTyB1vah9p56", - "outputId": "245a8257-d1a3-4e94-a062-07b820b71aed" + "id": "74af192e-757c-4c0a-bdf9-b7eb25bf6ebc" }, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor(1.1269)" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "targets = torch.tensor([0, 1, -100])\n", - "inputs = torch.tensor(\n", - " [[-1., 1.],\n", - " [-0.5, 1.5],\n", - " [-0.5, 1.5]]\n", - ")\n", - "torch.nn.functional.cross_entropy(inputs, targets)" + "def custom_collate_draft_2(\n", + " batch,\n", + " pad_token_id=50256,\n", + " device=\"cpu\"\n", + "):\n", + " # Find the longest sequence in the batch\n", + " batch_max_length = max(len(item)+1 for item in batch)\n", + "\n", + " # Pad and prepare inputs\n", + " inputs_lst, targets_lst = [], []\n", + "\n", + " for item in batch:\n", + " new_item = item.copy()\n", + " # Add an <|endoftext|> token\n", + " new_item += [pad_token_id]\n", + " # Pad sequences to max_length\n", + " padded = new_item + [pad_token_id] * (batch_max_length - len(new_item))\n", + " inputs = torch.tensor(padded[:-1]) # Truncate the last token for inputs\n", + " targets = torch.tensor(padded[1:]) # Shift +1 to the right for targets\n", + " inputs_lst.append(inputs)\n", + " targets_lst.append(targets)\n", + "\n", + " # Convert list of inputs to tensor and transfer to target device\n", + " inputs_tensor = torch.stack(inputs_lst).to(device)\n", + " targets_tensor = torch.stack(targets_lst).to(device)\n", + " return inputs_tensor, targets_tensor" ] }, { "cell_type": "code", "execution_count": 16, + "id": "6eb2bce3-28a7-4f39-9d4b-5e972d69066c", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "6eb2bce3-28a7-4f39-9d4b-5e972d69066c", + "outputId": "319c9a66-3937-4178-d645-d1bb62d4cbd9" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[ 0, 1, 2, 3, 4],\n", + " [ 5, 6, 50256, 50256, 50256],\n", + " [ 7, 8, 9, 50256, 50256]])\n", + "tensor([[ 1, 2, 3, 4, 50256],\n", + " [ 6, 50256, 50256, 50256, 50256],\n", + " [ 8, 9, 50256, 50256, 50256]])\n" + ] + } + ], + "source": [ + "inputs, targets = custom_collate_draft_2(batch)\n", + "print(inputs)\n", + "print(targets)" + ] + }, + { + "cell_type": "markdown", + "id": "3bf85703-a0e0-42aa-8f29-cbc28dbf4e15", + "metadata": { + "id": "3bf85703-a0e0-42aa-8f29-cbc28dbf4e15" + }, + "source": [ + "- Next, we introduce an `ignore_index` value to replace all padding token IDs with a new value; the purpose of this `ignore_index` is that we can ignore padding values in the loss function (more on that later)\n", + "\n", + "\n", + "\n", + "- Concretely, this means that we replace the token IDs corresponding to `50256` with `-100` as illustrated below" + ] + }, + { + "cell_type": "markdown", + "id": "bd4bed33-956e-4b3f-a09c-586d8203109a", + "metadata": { + "id": "bd4bed33-956e-4b3f-a09c-586d8203109a" + }, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "id": "5346513e-c3f4-44fe-af22-4ebd36497728", + "metadata": { + "id": "5346513e-c3f4-44fe-af22-4ebd36497728" + }, + "source": [ + "- (In addition, we also introduce the `allowed_max_length` in case we want to limit the length of the samples; this will be useful if you plan to work with your own datasets that are longer than the 1024 
token context size supported by the GPT-2 model)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, "id": "41ec6e2d-9eb2-4124-913e-d2af39be4cf2", "metadata": { "id": "41ec6e2d-9eb2-4124-913e-d2af39be4cf2" @@ -692,21 +839,23 @@ "\n", " # Pad and prepare inputs and targets\n", " inputs_lst, targets_lst = [], []\n", + "\n", " for item in batch:\n", + " new_item = item.copy()\n", " # Add an <|endoftext|> token\n", - " item += [pad_token_id]\n", + " new_item += [pad_token_id]\n", " # Pad sequences to max_length\n", - " padded = item + [pad_token_id] * (batch_max_length - len(item))\n", + " padded = new_item + [pad_token_id] * (batch_max_length - len(new_item))\n", " inputs = torch.tensor(padded[:-1]) # Truncate the last token for inputs\n", " targets = torch.tensor(padded[1:]) # Shift +1 to the right for targets\n", "\n", - " # Replace all but the first padding tokens in targets by ignore_index\n", + " # New: Replace all but the first padding tokens in targets by ignore_index\n", " mask = targets == pad_token_id\n", " indices = torch.nonzero(mask).squeeze()\n", " if indices.numel() > 1:\n", " targets[indices[1:]] = ignore_index\n", "\n", - " # Optionally truncate to maximum sequence length\n", + " # New: Optionally truncate to maximum sequence length\n", " if allowed_max_length is not None:\n", " inputs = inputs[:allowed_max_length]\n", " targets = targets[:allowed_max_length]\n", @@ -714,6 +863,7 @@ " inputs_lst.append(inputs)\n", " targets_lst.append(targets)\n", "\n", + " # Convert list of inputs and targets to tensors and transfer to target device\n", " inputs_tensor = torch.stack(inputs_lst).to(device)\n", " targets_tensor = torch.stack(targets_lst).to(device)\n", "\n", @@ -722,52 +872,244 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 18, "id": "cdf5eec4-9ebe-4be0-9fca-9a47bee88fdc", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "cdf5eec4-9ebe-4be0-9fca-9a47bee88fdc", - "outputId": "0484b12b-b0d6-4329-d6d3-7a2b05fbaf8e" + "outputId": "c1aae7d5-10fd-4f55-ef6c-0fd6a045ab2d" }, "outputs": [ { - "data": { - "text/plain": [ - "(tensor([[ 0, 1, 2, 3, 4, 5, 6],\n", - " [ 7, 8, 9, 50256, 50256, 50256, 50256]]),\n", - " tensor([[ 1, 2, 3, 4, 5, 6, 50256],\n", - " [ 8, 9, 50256, -100, -100, -100, -100]]))" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[ 0, 1, 2, 3, 4],\n", + " [ 5, 6, 50256, 50256, 50256],\n", + " [ 7, 8, 9, 50256, 50256]])\n", + "tensor([[ 1, 2, 3, 4, 50256],\n", + " [ 6, 50256, -100, -100, -100],\n", + " [ 8, 9, 50256, -100, -100]])\n" + ] } ], "source": [ - "inputs_1 = [0, 1, 2, 3, 4, 5, 6]\n", - "inputs_2 = [7, 8, 9]\n", - "\n", - "batch = (\n", - " inputs_1,\n", - " inputs_2\n", - ")\n", - "\n", - "custom_collate_fn(batch)" + "inputs, targets = custom_collate_fn(batch)\n", + "print(inputs)\n", + "print(targets)" + ] + }, + { + "cell_type": "markdown", + "id": "26727c90-0d42-43b3-af21-0a66ad4fbbc7", + "metadata": { + "id": "26727c90-0d42-43b3-af21-0a66ad4fbbc7" + }, + "source": [ + "- Let's see what this replacement by -100 accomplishes\n", + "- For illustration purposes, let's assume we have a small classification task with 2 class labels, 0 and 1, similar to chapter 6\n", + "- If we have the following logits values (outputs of the last layer of the model), we calculate the following loss" ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 19, + "id": "W2jvh-OP9MFV", + "metadata": { 
+ "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "W2jvh-OP9MFV", + "outputId": "2d3edcc3-17ca-42d4-9364-f1b4ed38648c" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor(1.1269)\n" + ] + } + ], + "source": [ + "logits_1 = torch.tensor(\n", + " [[-1.0, 1.0], # 1st training example\n", + " [-0.5, 1.5]] # 2nd training example\n", + ")\n", + "targets_1 = torch.tensor([0, 1])\n", + "\n", + "\n", + "loss_1 = torch.nn.functional.cross_entropy(logits_1, targets_1)\n", + "print(loss_1)" + ] + }, + { + "cell_type": "markdown", + "id": "5edd3244-8886-4505-92e9-367d28529e1e", + "metadata": { + "id": "5edd3244-8886-4505-92e9-367d28529e1e" + }, + "source": [ + "- Now, adding one more training example will, as expected, influence the loss" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "nvVMuil89v9N", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "nvVMuil89v9N", + "outputId": "4685690a-5420-4f65-bd5a-eb040bf969b3" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor(0.7936)\n" + ] + } + ], + "source": [ + "logits_2 = torch.tensor(\n", + " [[-1.0, 1.0],\n", + " [-0.5, 1.5],\n", + " [-0.5, 1.5]] # New 3rd training example\n", + ")\n", + "targets_2 = torch.tensor([0, 1, 1])\n", + "\n", + "loss_2 = torch.nn.functional.cross_entropy(logits_2, targets_2)\n", + "print(loss_2)" + ] + }, + { + "cell_type": "markdown", + "id": "54dca331-40e0-468b-b690-189fe156ba8f", + "metadata": { + "id": "54dca331-40e0-468b-b690-189fe156ba8f" + }, + "source": [ + "- Let's see what happens if we replace the class label of one of the examples with -100" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "RTyB1vah9p56", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "RTyB1vah9p56", + "outputId": "06e90424-81a2-40ae-8740-957be35b68de" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor(1.1269)\n", + "loss_1 == loss_3: tensor(True)\n" + ] + } + ], + "source": [ + "targets_3 = torch.tensor([0, 1, -100])\n", + "\n", + "loss_3 = torch.nn.functional.cross_entropy(logits_2, targets_3)\n", + "print(loss_3)\n", + "print(\"loss_1 == loss_3:\", loss_1 == loss_3)" + ] + }, + { + "cell_type": "markdown", + "id": "cef09d21-b652-4760-abea-4f76920e6a25", + "metadata": { + "id": "cef09d21-b652-4760-abea-4f76920e6a25" + }, + "source": [ + "- As we can see, the resulting loss on these 3 training examples is the same as the loss we calculated from the 2 training examples, which means that the cross entropy loss function ignored the training example with the -100 label\n", + "- By default, PyTorch has the `cross_entropy(..., ignore_index=-100)` setting to ignore examples corresponding to the label -100\n", + "- Using this -100 `ignore_index`, we can ignore the additional end-of-text (padding) tokens in the batches that we used to pad the training examples to equal length\n", + "- However, we don't want to ignore the first instance of the end-of-text (padding) token (50256) because it can help signal to the LLM when the response is complete" + ] + }, + { + "cell_type": "markdown", + "id": "6a4e9c5f-7c49-4321-9f1b-a50468a84524", + "metadata": { + "id": "6a4e9c5f-7c49-4321-9f1b-a50468a84524" + }, + "source": [ + "- In practice, it is also common to mask out the target token IDs that correspond to the instruction, as illustrated in the figure below (this is a recommended reader exercise after completing the 
chapter)" + ] + }, + { + "cell_type": "markdown", + "id": "fab8f0ed-80e8-4fd9-bf84-e5d0e0bc0a39", + "metadata": { + "id": "fab8f0ed-80e8-4fd9-bf84-e5d0e0bc0a39" + }, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "id": "bccaf048-ec95-498c-9155-d5b3ccba6c96", + "metadata": { + "id": "bccaf048-ec95-498c-9155-d5b3ccba6c96" + }, + "source": [ + "## 7.4 Creating data loaders for an instruction dataset" + ] + }, + { + "cell_type": "markdown", + "id": "e6b8e656-3af3-4db6-8dde-d8c216a12f50", + "metadata": { + "id": "e6b8e656-3af3-4db6-8dde-d8c216a12f50" + }, + "source": [ + "- In this section, we use the `InstructionDataset` class and `custom_collate_fn` function to instantiate the training, validation, and test data loaders" + ] + }, + { + "cell_type": "markdown", + "id": "9fffe390-b226-4d5c-983f-9f4da773cb82", + "metadata": { + "id": "9fffe390-b226-4d5c-983f-9f4da773cb82" + }, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "id": "932677e9-9317-42e8-b461-7b0269518f97", + "metadata": { + "id": "932677e9-9317-42e8-b461-7b0269518f97" + }, + "source": [ + "- Another additional detail of the previous `custom_collate_fn` function is that we now directly move the data to the target device (e.g., GPU) instead of doing it in the main training loop, which improves efficiency because it can be carried out as a background process when we use the `custom_collate_fn` as part of the data loader\n", + "- Using the `partial` function from Python's `functools` standard library, we create a new function with the `device` argument of the original function pre-filled" + ] + }, + { + "cell_type": "code", + "execution_count": 22, "id": "etpqqWh8phKc", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "etpqqWh8phKc", - "outputId": "f2f902d2-d51a-4a62-a2ae-b1f52037c92f" + "outputId": "ec2b7e6e-3b60-4377-ab40-b74ed8b7ddad" }, "outputs": [ { @@ -784,12 +1126,22 @@ "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", "print(\"Device:\", device)\n", "\n", - "customized_collate_fn = partial(custom_collate_fn, device=device)" + "customized_collate_fn = partial(custom_collate_fn, device=device, allowed_max_length=1024)" + ] + }, + { + "cell_type": "markdown", + "id": "8ff42c29-8b81-45e5-ae8d-b97cd1cf447a", + "metadata": { + "id": "8ff42c29-8b81-45e5-ae8d-b97cd1cf447a" + }, + "source": [ + "- Next, we instantiate the data loaders similar to previous chapters, except that we now provide our own collate function for the batching process" ] }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 23, "id": "BtWkgir6Hlpe", "metadata": { "id": "BtWkgir6Hlpe" @@ -816,7 +1168,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 24, "id": "1d097dc8-ad34-4f05-b435-e4147965f532", "metadata": { "id": "1d097dc8-ad34-4f05-b435-e4147965f532" @@ -842,16 +1194,26 @@ ")" ] }, + { + "cell_type": "markdown", + "id": "3f67c147-b1a2-4a95-9807-e2d0de0324c0", + "metadata": { + "id": "3f67c147-b1a2-4a95-9807-e2d0de0324c0" + }, + "source": [ + "- Let's see what the dimensions of the resulting input and target batches look like" + ] + }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 25, "id": "GGs1AI3vHpnX", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "GGs1AI3vHpnX", - "outputId": "df95971c-10ca-49e8-9823-d63bc5b6a3fc" + "outputId": "8ed36fb6-fa13-47ad-c6fd-851b4bed51c4" }, "outputs": [ { @@ -980,58 +1342,91 @@ ], "source": [ "print(\"Train loader:\")\n", - "for x, y in train_loader:\n", - " 
print(x.shape, y.shape)" + "for inputs, targets in train_loader:\n", + " print(inputs.shape, targets.shape)" + ] + }, + { + "cell_type": "markdown", + "id": "0c8e8dd7-d46a-4cc3-8a7e-c1d31e1b4657", + "metadata": { + "id": "0c8e8dd7-d46a-4cc3-8a7e-c1d31e1b4657" + }, + "source": [ + "- As we can see based on the output above, all batches have a batch size of 8 but a different length, as expected\n", + "- Let's also double-check that the inputs contain the `<|endoftext|>` padding tokens corresponding to token ID 50256 by printing the contents of the first training example in the `inputs` batch" ] }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 26, "id": "21b8fd02-014f-4481-9b71-5bfee8f9dfcd", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "21b8fd02-014f-4481-9b71-5bfee8f9dfcd", - "outputId": "cacf7f22-ec66-4350-8db4-890e7e86718f" + "outputId": "76360691-6f1d-4747-ca17-3ae127a0c93a" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "21106, 318, 281, 12064, 326, 8477, 257, 4876, 13, 19430, 257, 2882, 326, 20431, 32543, 262, 2581, 13, 198, 198, 21017, 46486, 25, 198, 30003, 6525, 262, 6827, 1262, 257, 985, 576, 13, 198, 198, 21017, 23412, 25, 198, 464, 5156, 318, 845, 13779, 13, 198, 198, 21017, 18261, 25, 198, 464, 5156, 318, 355, 13779, 355, 257, 4936, 13, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, " + "tensor([21106, 318, 281, 12064, 326, 8477, 257, 4876, 13, 19430,\n", + " 257, 2882, 326, 20431, 32543, 262, 2581, 13, 198, 198,\n", + " 21017, 46486, 25, 198, 30003, 6525, 262, 6827, 1262, 257,\n", + " 985, 576, 13, 198, 198, 21017, 23412, 25, 198, 464,\n", + " 5156, 318, 845, 13779, 13, 198, 198, 21017, 18261, 25,\n", + " 198, 464, 5156, 318, 355, 13779, 355, 257, 4936, 13,\n", + " 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256],\n", + " device='cuda:0')\n" ] } ], "source": [ - "for i in x[0]:\n", - " print(i.item(), end=\", \")" + "print(inputs[0])" + ] + }, + { + "cell_type": "markdown", + "id": "5f1f3647-8971-4006-89e0-6a2a1ec1d360", + "metadata": { + "id": "5f1f3647-8971-4006-89e0-6a2a1ec1d360" + }, + "source": [ + "- Similarly, we visually double-check that the targets contain the -100 placeholder tokens" ] }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 27, "id": "51649ab4-1a7e-4a9e-92c5-950a24fde211", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "51649ab4-1a7e-4a9e-92c5-950a24fde211", - "outputId": "486fda24-80d4-4bc2-f253-2476f93cd146" + "outputId": "bebe4bc6-50c0-4c3c-bca3-a15025bbd087" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "318, 281, 12064, 326, 8477, 257, 4876, 13, 19430, 257, 2882, 326, 20431, 32543, 262, 2581, 13, 198, 198, 21017, 46486, 25, 198, 30003, 6525, 262, 6827, 1262, 257, 985, 576, 13, 198, 198, 21017, 23412, 25, 198, 464, 5156, 318, 845, 13779, 13, 198, 198, 21017, 18261, 25, 198, 464, 5156, 318, 355, 13779, 355, 257, 4936, 13, 50256, -100, -100, -100, -100, -100, -100, -100, -100, -100, " + "tensor([ 318, 281, 12064, 326, 8477, 257, 4876, 13, 19430, 257,\n", + " 2882, 326, 20431, 32543, 262, 2581, 13, 198, 198, 21017,\n", + " 46486, 25, 198, 30003, 6525, 262, 6827, 1262, 257, 985,\n", + " 576, 13, 198, 198, 21017, 23412, 25, 198, 464, 5156,\n", + " 318, 845, 13779, 13, 198, 198, 21017, 18261, 25, 198,\n", + " 464, 5156, 318, 355, 13779, 355, 257, 4936, 13, 50256,\n", + " -100, -100, -100, -100, -100, -100, -100, -100, -100],\n", + " device='cuda:0')\n" ] } ], "source": [ - "for i 
in y[0]:\n", - " print(i.item(), end=\", \")" + "print(targets[0])" ] }, { @@ -1041,32 +1436,69 @@ "id": "d6aad445-8f19-4238-b9bf-db80767fb91a" }, "source": [ - "## 7.4 Loading a pretrained LLM" + "## 7.5 Loading a pretrained LLM" + ] + }, + { + "cell_type": "markdown", + "id": "5a5c07d1-4fc9-4846-94cf-b11a085a667b", + "metadata": { + "id": "5a5c07d1-4fc9-4846-94cf-b11a085a667b" + }, + "source": [ + "- In this section, we load a pretrained GPT model using the same code that we used in section 5.5 of chapter 5 and section 6.4 in chapter 6" + ] + }, + { + "cell_type": "markdown", + "id": "8d1b438f-88af-413f-96a9-f059c6c55fc4", + "metadata": { + "id": "8d1b438f-88af-413f-96a9-f059c6c55fc4" + }, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "id": "8c68eda7-e02e-4caa-846b-ca6dbd396ca2", + "metadata": { + "id": "8c68eda7-e02e-4caa-846b-ca6dbd396ca2" + }, + "source": [ + "- However, instead of loading the smallest 124 million parameter model, we load the medium version with 355 parameters since the 124 million model is too small for achieving qualitatively reasonable results via instruction finetuning" ] }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 28, "id": "0d249d67-5eba-414e-9bd2-972ebf01329d", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "0d249d67-5eba-414e-9bd2-972ebf01329d", - "outputId": "ca78e098-c253-4bbe-ebb5-6fd018d8e037" + "outputId": "2e34f5b9-747c-4126-e612-2326d2ea033b" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "checkpoint: 100%|██████████| 77.0/77.0 [00:00<00:00, 116kiB/s]\n", - "encoder.json: 100%|██████████| 1.04M/1.04M [00:02<00:00, 509kiB/s]\n", - "hparams.json: 100%|██████████| 91.0/91.0 [00:00<00:00, 138kiB/s]\n", - "model.ckpt.data-00000-of-00001: 100%|██████████| 1.42G/1.42G [02:49<00:00, 8.38MiB/s]\n", - "model.ckpt.index: 100%|██████████| 10.4k/10.4k [00:00<00:00, 13.8MiB/s]\n", + "2024-06-15 19:20:04.351655: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", + "2024-06-15 19:20:04.402386: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "2024-06-15 19:20:04.402428: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "2024-06-15 19:20:04.403935: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2024-06-15 19:20:04.412531: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", + "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", + "2024-06-15 19:20:05.571079: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", + "checkpoint: 100%|██████████| 77.0/77.0 [00:00<00:00, 156kiB/s]\n", + "encoder.json: 100%|██████████| 1.04M/1.04M [00:02<00:00, 467kiB/s]\n", + "hparams.json: 100%|██████████| 91.0/91.0 [00:00<00:00, 198kiB/s]\n", + "model.ckpt.data-00000-of-00001: 100%|██████████| 1.42G/1.42G [05:50<00:00, 4.05MiB/s]\n", + "model.ckpt.index: 100%|██████████| 10.4k/10.4k [00:00<00:00, 18.1MiB/s]\n", "model.ckpt.meta: 100%|██████████| 927k/927k [00:02<00:00, 454kiB/s]\n", - "vocab.bpe: 100%|██████████| 456k/456k [00:01<00:00, 321kiB/s]\n" + "vocab.bpe: 100%|██████████| 456k/456k [00:01<00:00, 283kiB/s]\n" ] } ], @@ -1101,16 +1533,26 @@ "model.eval();" ] }, + { + "cell_type": "markdown", + "id": "dbf3afed-bc8e-4d3a-ad9d-eb6f57bb7af5", + "metadata": { + "id": "dbf3afed-bc8e-4d3a-ad9d-eb6f57bb7af5" + }, + "source": [ + "- Before we start finetuning the model in the next section, let's see how it performs on one of the validation tasks" + ] + }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 29, "id": "7bd32b7c-5b44-4d25-a09f-46836802ca74", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "7bd32b7c-5b44-4d25-a09f-46836802ca74", - "outputId": "e5dbf217-591c-4c2e-9ec2-ef5365fa269e" + "outputId": "07a5c9c3-7cdf-44ad-c3ac-ccd63cb0d9e0" }, "outputs": [ { @@ -1120,8 +1562,69 @@ "Below is an instruction that describes a task. 
Write a response that appropriately completes the request.\n", "\n", "### Instruction:\n", "Convert the active sentence to passive: 'The chef cooks the meal every day.'\n" ] } ], "source": [ "torch.manual_seed(123)\n", "\n", "input_text = format_input(val_data[0])\n", "print(input_text)" ] }, { "cell_type": "code", "execution_count": 30, "id": "2e3e68e0-2627-4c65-b4e7-1e0667e4f6fa", "metadata": { "id": "2e3e68e0-2627-4c65-b4e7-1e0667e4f6fa" }, "outputs": [], "source": [ "from previous_chapters import (\n", " generate,\n", " text_to_token_ids,\n", " token_ids_to_text\n", ")\n", "\n", "token_ids = generate(\n", " model=model,\n", " idx=text_to_token_ids(input_text, tokenizer),\n", " max_new_tokens=35,\n", " context_size=BASE_CONFIG[\"context_length\"],\n", " eos_id=50256,\n", ")\n", "generated_text = token_ids_to_text(token_ids, tokenizer)" ] }, { "cell_type": "markdown", "id": "36e2fda5-f796-4954-8f72-1dd1123e3344", "metadata": { "id": "36e2fda5-f796-4954-8f72-1dd1123e3344" }, "source": [ "- Note that the `generate` function we used in previous chapters returns the combined input and output text, which was convenient in the previous section for creating legible text\n", "- To isolate the response, we can subtract the length of the instruction from the start of the `generated_text`" ] }, { "cell_type": "code", "execution_count": 31, "id": "ba4a55bf-a245-48d8-beda-2838a58fb5ba", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "ba4a55bf-a245-48d8-beda-2838a58fb5ba", "outputId": "84659f07-0106-4bf7-b459-84599b8e4ee7" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "### Response:\n", "\n", "The chef cooks the meal every day.\n", "\n", "### Instruction:\n", "\n", "Convert the active sentence to passive: 'The chef cooks the meal every day.'\n" ] } ], "source": [ "response_text = generated_text[len(input_text):].strip()\n", "print(response_text)" ] }, { "cell_type": "markdown", "id": "d44080b2-a4c5-4520-a797-549519f66a3e", "metadata": { "id": "d44080b2-a4c5-4520-a797-549519f66a3e" }, "source": [ "- As we can see, the model is not yet capable of following the instructions; it creates a \"Response\" section, but it simply repeats the original input sentence as well as the instruction" ] }, { "cell_type": "markdown", "id": "d6aad445-8f19-4238-b9bf-db80767fb91a", "metadata": { "id": "d6aad445-8f19-4238-b9bf-db80767fb91a" }, "source": [ - "## 7.4 Loading a pretrained LLM" + "## 7.5 Loading a pretrained LLM" ] }, { "cell_type": "markdown", "id": "5a5c07d1-4fc9-4846-94cf-b11a085a667b", "metadata": { "id": "5a5c07d1-4fc9-4846-94cf-b11a085a667b" }, "source": [ "- In this section, we load a pretrained GPT model using the same code that we used in section 5.5 of chapter 5 and section 6.4 of chapter 6" ] }, { "cell_type": "markdown", "id": "8d1b438f-88af-413f-96a9-f059c6c55fc4", "metadata": { "id": "8d1b438f-88af-413f-96a9-f059c6c55fc4" }, "source": [ "" ] }, { "cell_type": "markdown", "id": "8c68eda7-e02e-4caa-846b-ca6dbd396ca2", "metadata": { "id": "8c68eda7-e02e-4caa-846b-ca6dbd396ca2" }, "source": [ "- However, instead of loading the smallest 124 million parameter model, we load the medium version with 355 million parameters, since the 124 million parameter model is too small to achieve qualitatively reasonable results via instruction finetuning" ] }, { "cell_type": "code", "execution_count": 28, "id": "0d249d67-5eba-414e-9bd2-972ebf01329d", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "0d249d67-5eba-414e-9bd2-972ebf01329d", - "outputId": "ca78e098-c253-4bbe-ebb5-6fd018d8e037" + "outputId": "2e34f5b9-747c-4126-e612-2326d2ea033b" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "checkpoint: 100%|██████████| 77.0/77.0 [00:00<00:00, 116kiB/s]\n", - "encoder.json: 100%|██████████| 1.04M/1.04M [00:02<00:00, 509kiB/s]\n", - "hparams.json: 100%|██████████| 91.0/91.0 [00:00<00:00, 138kiB/s]\n", - "model.ckpt.data-00000-of-00001: 100%|██████████| 1.42G/1.42G [02:49<00:00, 8.38MiB/s]\n", - "model.ckpt.index: 100%|██████████| 10.4k/10.4k [00:00<00:00, 13.8MiB/s]\n", + "2024-06-15 19:20:04.351655: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
"65444865-df87-4d98-9faf-875e1c4be860" @@ -1176,23 +1689,33 @@ ")" ] }, + { + "cell_type": "markdown", + "id": "00083059-aa41-4d37-8a17-1c72d1b1ca00", + "metadata": { + "id": "00083059-aa41-4d37-8a17-1c72d1b1ca00" + }, + "source": [ + "- Let's calculate the initial training and validation set loss before we start training (as in previous chapters, the goal is to minimize the loss)" + ] + }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 33, "id": "d99fc6f8-63b2-43da-adbb-a7b6b92c8dd5", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "d99fc6f8-63b2-43da-adbb-a7b6b92c8dd5", - "outputId": "a4d82a24-f16e-4cf7-ebe6-0bff051517a1" + "outputId": "f28bd4fd-411f-4f62-b381-4c21c09a2b01" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Training loss: 3.8259091854095457\n", + "Training loss: 3.825908660888672\n", "Validation loss: 3.7619335651397705\n" ] } @@ -1200,9 +1723,9 @@ "source": [ "model.to(device)\n", "\n", - "torch.manual_seed(123) # For reproducibility due to the shuffling in the data loader\n", + "torch.manual_seed(123)\n", "\n", - "with torch.no_grad(): # Disable gradient tracking for efficiency because we are not training, yet\n", + "with torch.no_grad():\n", " train_loss = calc_loss_loader(train_loader, model, device, num_batches=5)\n", " val_loss = calc_loss_loader(val_loader, model, device, num_batches=5)\n", "\n", @@ -1210,6 +1733,17 @@ "print(\"Validation loss:\", val_loss)" ] }, + { + "cell_type": "markdown", + "id": "12a6da8f-15b3-42b0-a136-619b7a35c3e9", + "metadata": { + "id": "12a6da8f-15b3-42b0-a136-619b7a35c3e9" + }, + "source": [ + "- Note that the training is a bit more expensive than in previous chapters since we are using a larger model (355 million instead of 124 million parameters)\n", + "- The runtimes for various devices are shown for reference below (running this notebook on a compatible GPU device requires no changes to the code)" + ] + }, { "cell_type": "markdown", "id": "db4b57fb-e689-4550-931c-6d34a932487c", @@ -1217,33 +1751,32 @@ "id": "db4b57fb-e689-4550-931c-6d34a932487c" }, "source": [ - "- Runtimes:\n", - "\n", "
\n", " \n", - "| Model | Platform | Runtime |\n", - "|--------------------|-----------------------|----------------|\n", - "| gpt2-medium (355M) | CPU (M3 MacBook Air) | 23.67 minutes |\n", - "| gpt2-medium (355M) | GPU (L4) | 2.98 minutes |\n", - "| gpt2-medium (355M) | GPU (A100) | 1.29 minutes |\n", - "| gpt2-small (124M) | CPU (M3 MacBook Air) | 8.61 minutes |\n", - "| gpt2-small (124M) | GPU (A100) | 0.59 minutes |\n", + "| Model | Device | Runtime for 2 Epochs |\n", + "|--------------------|-----------------------|----------------------|\n", + "| gpt2-medium (355M) | CPU (M3 MacBook Air) | 15.78 minutes |\n", + "| gpt2-medium (355M) | GPU (L4) | 1.83 minutes |\n", + "| gpt2-medium (355M) | GPU (A100) | 0.86 minutes |\n", + "| gpt2-small (124M) | CPU (M3 MacBook Air) | 5.74 minutes |\n", + "| gpt2-small (124M) | GPU (L4) | 0.69 minutes |\n", + "| gpt2-small (124M) | GPU (A100) | 0.39 minutes |\n", "\n", "
\n", "\n", - "- This notebook was run with the `\"gpt2-medium (355M)\"` model" + "- I ran this notebook using the `\"gpt2-medium (355M)\"` model" ] }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 34, "id": "78bcf83a-1fff-4540-97c1-765c4016d5e3", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "78bcf83a-1fff-4540-97c1-765c4016d5e3", - "outputId": "285ca27c-019f-4c2b-e130-8c46d2e7df53" + "outputId": "d49900e9-cb54-4c89-b528-fa4cc2e0dd9b" }, "outputs": [ { @@ -1251,79 +1784,55 @@ "output_type": "stream", "text": [ "Ep 1 (Step 000000): Train loss 2.637, Val loss 2.626\n", - "Ep 1 (Step 000005): Train loss 1.174, Val loss 1.102\n", + "Ep 1 (Step 000005): Train loss 1.174, Val loss 1.103\n", "Ep 1 (Step 000010): Train loss 0.872, Val loss 0.944\n", "Ep 1 (Step 000015): Train loss 0.857, Val loss 0.906\n", "Ep 1 (Step 000020): Train loss 0.776, Val loss 0.881\n", "Ep 1 (Step 000025): Train loss 0.754, Val loss 0.859\n", - "Ep 1 (Step 000030): Train loss 0.799, Val loss 0.836\n", - "Ep 1 (Step 000035): Train loss 0.714, Val loss 0.808\n", + "Ep 1 (Step 000030): Train loss 0.800, Val loss 0.836\n", + "Ep 1 (Step 000035): Train loss 0.714, Val loss 0.809\n", "Ep 1 (Step 000040): Train loss 0.672, Val loss 0.806\n", "Ep 1 (Step 000045): Train loss 0.633, Val loss 0.789\n", - "Ep 1 (Step 000050): Train loss 0.663, Val loss 0.783\n", + "Ep 1 (Step 000050): Train loss 0.663, Val loss 0.782\n", "Ep 1 (Step 000055): Train loss 0.760, Val loss 0.763\n", "Ep 1 (Step 000060): Train loss 0.719, Val loss 0.743\n", "Ep 1 (Step 000065): Train loss 0.653, Val loss 0.735\n", - "Ep 1 (Step 000070): Train loss 0.532, Val loss 0.729\n", - "Ep 1 (Step 000075): Train loss 0.569, Val loss 0.728\n", - "Ep 1 (Step 000080): Train loss 0.605, Val loss 0.725\n", - "Ep 1 (Step 000085): Train loss 0.509, Val loss 0.709\n", - "Ep 1 (Step 000090): Train loss 0.562, Val loss 0.691\n", - "Ep 1 (Step 000095): Train loss 0.500, Val loss 0.681\n", - "Ep 1 (Step 000100): Train loss 0.503, Val loss 0.677\n", - "Ep 1 (Step 000105): Train loss 0.564, Val loss 0.670\n", - "Ep 1 (Step 000110): Train loss 0.555, Val loss 0.666\n", - "Ep 1 (Step 000115): Train loss 0.508, Val loss 0.664\n", + "Ep 1 (Step 000070): Train loss 0.535, Val loss 0.732\n", + "Ep 1 (Step 000075): Train loss 0.568, Val loss 0.738\n", + "Ep 1 (Step 000080): Train loss 0.603, Val loss 0.733\n", + "Ep 1 (Step 000085): Train loss 0.515, Val loss 0.716\n", + "Ep 1 (Step 000090): Train loss 0.573, Val loss 0.698\n", + "Ep 1 (Step 000095): Train loss 0.505, Val loss 0.688\n", + "Ep 1 (Step 000100): Train loss 0.507, Val loss 0.683\n", + "Ep 1 (Step 000105): Train loss 0.568, Val loss 0.675\n", + "Ep 1 (Step 000110): Train loss 0.562, Val loss 0.670\n", + "Ep 1 (Step 000115): Train loss 0.520, Val loss 0.665\n", "Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Convert the active sentence to passive: 'The chef cooks the meal every day.' ### Response: The meal is prepared every day by the chef.<|endoftext|>The following is an instruction that describes a task. Write a response that appropriately completes the request. 
### Instruction: Convert the active sentence to passive:\n", - "Ep 2 (Step 000120): Train loss 0.435, Val loss 0.672\n", - "Ep 2 (Step 000125): Train loss 0.451, Val loss 0.687\n", - "Ep 2 (Step 000130): Train loss 0.447, Val loss 0.683\n", - "Ep 2 (Step 000135): Train loss 0.405, Val loss 0.682\n", - "Ep 2 (Step 000140): Train loss 0.409, Val loss 0.681\n", - "Ep 2 (Step 000145): Train loss 0.369, Val loss 0.680\n", - "Ep 2 (Step 000150): Train loss 0.382, Val loss 0.675\n", - "Ep 2 (Step 000155): Train loss 0.413, Val loss 0.675\n", - "Ep 2 (Step 000160): Train loss 0.415, Val loss 0.683\n", - "Ep 2 (Step 000165): Train loss 0.379, Val loss 0.686\n", - "Ep 2 (Step 000170): Train loss 0.323, Val loss 0.681\n", + "Ep 2 (Step 000120): Train loss 0.438, Val loss 0.670\n", + "Ep 2 (Step 000125): Train loss 0.453, Val loss 0.685\n", + "Ep 2 (Step 000130): Train loss 0.448, Val loss 0.681\n", + "Ep 2 (Step 000135): Train loss 0.408, Val loss 0.677\n", + "Ep 2 (Step 000140): Train loss 0.409, Val loss 0.676\n", + "Ep 2 (Step 000145): Train loss 0.373, Val loss 0.676\n", + "Ep 2 (Step 000150): Train loss 0.381, Val loss 0.674\n", + "Ep 2 (Step 000155): Train loss 0.421, Val loss 0.677\n", + "Ep 2 (Step 000160): Train loss 0.416, Val loss 0.686\n", + "Ep 2 (Step 000165): Train loss 0.381, Val loss 0.688\n", + "Ep 2 (Step 000170): Train loss 0.329, Val loss 0.679\n", "Ep 2 (Step 000175): Train loss 0.337, Val loss 0.669\n", - "Ep 2 (Step 000180): Train loss 0.392, Val loss 0.657\n", - "Ep 2 (Step 000185): Train loss 0.415, Val loss 0.657\n", - "Ep 2 (Step 000190): Train loss 0.340, Val loss 0.648\n", - "Ep 2 (Step 000195): Train loss 0.329, Val loss 0.635\n", - "Ep 2 (Step 000200): Train loss 0.310, Val loss 0.635\n", - "Ep 2 (Step 000205): Train loss 0.352, Val loss 0.631\n", - "Ep 2 (Step 000210): Train loss 0.367, Val loss 0.630\n", - "Ep 2 (Step 000215): Train loss 0.396, Val loss 0.634\n", - "Ep 2 (Step 000220): Train loss 0.300, Val loss 0.647\n", - "Ep 2 (Step 000225): Train loss 0.347, Val loss 0.660\n", - "Ep 2 (Step 000230): Train loss 0.294, Val loss 0.655\n", + "Ep 2 (Step 000180): Train loss 0.393, Val loss 0.657\n", + "Ep 2 (Step 000185): Train loss 0.420, Val loss 0.659\n", + "Ep 2 (Step 000190): Train loss 0.342, Val loss 0.651\n", + "Ep 2 (Step 000195): Train loss 0.328, Val loss 0.636\n", + "Ep 2 (Step 000200): Train loss 0.312, Val loss 0.635\n", + "Ep 2 (Step 000205): Train loss 0.353, Val loss 0.633\n", + "Ep 2 (Step 000210): Train loss 0.368, Val loss 0.634\n", + "Ep 2 (Step 000215): Train loss 0.395, Val loss 0.639\n", + "Ep 2 (Step 000220): Train loss 0.301, Val loss 0.652\n", + "Ep 2 (Step 000225): Train loss 0.350, Val loss 0.664\n", + "Ep 2 (Step 000230): Train loss 0.300, Val loss 0.657\n", "Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Convert the active sentence to passive: 'The chef cooks the meal every day.' ### Response: The meal is cooked every day by the chef.<|endoftext|>The following is an instruction that describes a task. Write a response that appropriately completes the request. 
### Instruction: What is the capital of the United Kingdom\n", - "Ep 3 (Step 000235): Train loss 0.328, Val loss 0.661\n", - "Ep 3 (Step 000240): Train loss 0.280, Val loss 0.692\n", - "Ep 3 (Step 000245): Train loss 0.274, Val loss 0.702\n", - "Ep 3 (Step 000250): Train loss 0.248, Val loss 0.691\n", - "Ep 3 (Step 000255): Train loss 0.275, Val loss 0.680\n", - "Ep 3 (Step 000260): Train loss 0.266, Val loss 0.683\n", - "Ep 3 (Step 000265): Train loss 0.274, Val loss 0.701\n", - "Ep 3 (Step 000270): Train loss 0.280, Val loss 0.715\n", - "Ep 3 (Step 000275): Train loss 0.276, Val loss 0.705\n", - "Ep 3 (Step 000280): Train loss 0.296, Val loss 0.710\n", - "Ep 3 (Step 000285): Train loss 0.294, Val loss 0.714\n", - "Ep 3 (Step 000290): Train loss 0.287, Val loss 0.717\n", - "Ep 3 (Step 000295): Train loss 0.267, Val loss 0.711\n", - "Ep 3 (Step 000300): Train loss 0.271, Val loss 0.694\n", - "Ep 3 (Step 000305): Train loss 0.277, Val loss 0.686\n", - "Ep 3 (Step 000310): Train loss 0.276, Val loss 0.689\n", - "Ep 3 (Step 000315): Train loss 0.238, Val loss 0.688\n", - "Ep 3 (Step 000320): Train loss 0.255, Val loss 0.691\n", - "Ep 3 (Step 000325): Train loss 0.235, Val loss 0.693\n", - "Ep 3 (Step 000330): Train loss 0.233, Val loss 0.696\n", - "Ep 3 (Step 000335): Train loss 0.224, Val loss 0.698\n", - "Ep 3 (Step 000340): Train loss 0.243, Val loss 0.687\n", - "Ep 3 (Step 000345): Train loss 0.244, Val loss 0.675\n", - "Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Convert the active sentence to passive: 'The chef cooks the meal every day.' ### Response: The chef cooks the meal every day.<|endoftext|>The following is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: What is the capital of the United Kingdom? 
\n", - "Training completed in 2.98 minutes.\n" + "Training completed in 0.87 minutes.\n" ] } ], @@ -1336,7 +1845,7 @@ "\n", "optimizer = torch.optim.AdamW(model.parameters(), lr=0.00005, weight_decay=0.1)\n", "\n", - "num_epochs = 3\n", + "num_epochs = 2\n", "\n", "train_losses, val_losses, tokens_seen = train_model_simple(\n", " model, train_loader, val_loader, optimizer, device,\n", @@ -1349,22 +1858,43 @@ "print(f\"Training completed in {execution_time_minutes:.2f} minutes.\")" ] }, + { + "cell_type": "markdown", + "id": "Ise3wGjlB-iq", + "metadata": { + "id": "Ise3wGjlB-iq" + }, + "source": [ + "- As we can see based on the outputs above, the model trains well, as we can tell based on the decreasing training loss and validation loss values\n", + "- Furthermore, based on the response text printed after each epoch, we can see that the model correctly follows the instruction to convert the input sentence `'The chef cooks the meal every day.'` into passive voice `'The meal is cooked every day by the chef.'` (We will properly format and evaluate the responses in a later section)\n", + "- Finally, let's take a look at the training and validation loss curves" + ] + }, { "cell_type": "code", - "execution_count": 32, - "id": "1Vdh7jmHI1we", + "execution_count": 35, + "id": "4acd368b-1403-4807-a218-9102e35bfdbb", "metadata": { "colab": { "base_uri": "https://localhost:8080/", - "height": 308 + "height": 325 }, - "id": "1Vdh7jmHI1we", - "outputId": "475faf7f-13e6-4168-84f2-3eb3897ffd73" + "id": "4acd368b-1403-4807-a218-9102e35bfdbb", + "outputId": "3b6ebb63-6b97-4e86-ce10-ada80b881db6" }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAegAAAEiCAYAAAAyI0HeAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABhEklEQVR4nO3deVwU9f8H8Nfuwl7Act+XoIiIyKUQYGpJopmJZZr5TS2PLMz8WWZ+K89vaWlqpallSqWpaV55452ItygqoiKCB4dynwu7+/n9MbK4AnIt7ILv5+MxD3ZnPjPznll23vuZz8xneIwxBkIIIYToFb6uAyCEEEJIdZSgCSGEED1ECZoQQgjRQ5SgCSGEED1ECZoQQgjRQ5SgCSGEED1ECZoQQgjRQ5SgCSGEED1ECZoQQgjRQ5SgCWkDbt++DR6Ph/j4eF2HQgjREkrQhOgJHo/31GHWrFm6DpEQ0oIMdB0AIYSTnp6ufr1x40bMmDEDSUlJ6nHGxsa6CIsQoiNUgyZET9jZ2akHU1NT8Hg89XsbGxssWrQITk5OEIlE8PPzw969e2tdllKpxLvvvotOnTohLS0NALB9+3YEBARALBbD3d0ds2fPhkKhUM/D4/GwatUqDB48GFKpFB4eHtixY4d6em5uLkaMGAFra2tIJBJ4eHhgzZo1tcawefNm+Pj4QCKRwNLSEuHh4SguLlZPX7VqFby8vCAWi9GpUyf89NNPGvPfuXMHQ4cOhZmZGSwsLDBo0CDcvn1bPX306NGIjIzEwoULYW9vD0tLS0RFRaGioqLe+5wQvcYIIXpnzZo1zNTUVP1+0aJFTCaTsfXr17Nr166xTz/9lBkaGrLr168zxhhLSUlhANiFCxdYWVkZGzx4MPP392dZWVmMMcaOHTvGZDIZi46OZsnJyWz//v2sXbt2bNasWep1AGBOTk7szz//ZDdu3GCTJk1ixsbGLDs7mzHGWFRUFPPz82NnzpxhKSkpLCYmhu3YsaPG+O/fv88MDAzYokWLWEpKCrt06RJbtmwZKywsZIwxtnbtWmZvb8/+/vtvduvWLfb3338zCwsLFh0dzRhjrLy8nHl5ebF3332XXbp0iV29epW99dZbzNPTk8nlcsYYY6NGjWIymYxNmDCBJSYmsn/++YdJpVL2888/a/fDIERHKEETooeeTNAODg7sq6++0ijTvXt39sEHHzDGqhL0v//+y/r06cN69OjB8vLy1GX79OnDvv76a435//jjD2Zvb69+D4B98cUX6vdFRUUMANuzZw9jjLGBAweyd955p17xnzt3jgFgt2/frnF6+/bt2Z9//qkxbu7cuSwkJEQdm6enJ1OpVOrpcrmcSSQStm/fPsYYl6BdXV2ZQqFQl3njjTfYsGHD6hUjIfqO2qAJ0XMFBQW4f/8+wsLCNMaHhYXh4sWLGuOGDx8OJycnHDp0CBKJRD3+4sWLiI2NxVdffaUep1QqUVZWhpKSEkilUgBA165d1dONjIwgk8mQlZUFAHj//ffx+uuv4/z58+jbty8iIyMRGhpaY8y+vr7o06cPfHx8EBERgb59+2LIkCEwNzdHcXExkpOTMWbMGIwbN049j0KhgKmpqTremzdvwsTERGO5ZWVlSE5OVr/39vaGQCBQv7e3t0dCQsJT9iYhrQclaELakJdffhlr165FXFwcXnzxRfX4oqIizJ49G6+99lq1ecRisfq1oaGhxjQejweVSgUA6N+/P1JTU7F7927ExMSgT58+iIqKwsKFC6stUyAQICYmBidOnMD+/fvx448/4vPPP8epU6fUPwZ++eUXBAcHV5uvMt7AwECsW7eu2rKtra3rFS8hrR0laEL0nEwmg4ODA2JjY9GrVy/1+NjYWAQFBWmUff/999GlSxe8+uqr2LVrl
7p8QEAAkpKS0KFDhybFYm1tjVGjRmHUqFF4/vnnMXXq1BoTNMAly7CwMISFhWHGjBlwdXXF1q1bMWXKFDg4OODWrVsYMWJEjfMGBARg48aNsLGxgUwma1LMhLRWlKAJaQWmTp2KmTNnon379vDz88OaNWsQHx9fYw3zww8/hFKpxCuvvII9e/agR48emDFjBl555RW4uLhgyJAh4PP5uHjxIi5fvoz//e9/9YphxowZCAwMhLe3N+RyOXbu3AkvL68ay546dQoHDx5E3759YWNjg1OnTuHBgwfq8rNnz8akSZNgamqKfv36QS6X4+zZs8jNzcWUKVMwYsQILFiwAIMGDcKcOXPg5OSE1NRUbNmyBZ9++imcnJwavzMJaSUoQRPSCkyaNAn5+fn4+OOPkZWVhc6dO2PHjh3w8PCosfzkyZOhUqnw8ssvY+/evYiIiMDOnTsxZ84cfPPNNzA0NESnTp0wduzYescgFAoxffp03L59GxKJBM8//zw2bNhQY1mZTIZjx45hyZIlKCgogKurK7777jv0798fADB27FhIpVIsWLAAU6dOhZGREXx8fDB58mQAgFQqxbFjxzBt2jS89tprKCwshKOjI/r06UM1avLM4DHGmK6DIIQQQogm6qiEEEII0UOUoAkhhBA9RAmaEEII0UOUoAkhhBA9RAmaEEII0UOUoAkhhBA9RAm6kZYtW4Z27dpBLBYjODgYp0+fbrZ1zZs3D927d4eJiQlsbGwQGRmp8ZxgAOjduzd4PJ7GMGHCBI0yaWlpGDBgAKRSKWxsbDB16lSNxw0CwJEjRxAQEACRSIQOHTogOjq6WjwN2fZZs2ZVi6tTp07q6WVlZYiKioKlpSWMjY3x+uuvIzMzU+dxt2vXrlrcPB4PUVFRAPRrfx87dgwDBw6Eg4MDeDwetm3bpjGdMYYZM2bA3t4eEokE4eHhuHHjhkaZnJwcjBgxAjKZDGZmZhgzZgyKioo0yly6dAnPP/88xGIxnJ2d8e2331aLddOmTejUqRPEYjF8fHywe/fup8YSGBiIF198scbYKyoqMG3aNPj4+MDIyAgODg4YOXIk7t+/r7HMmj6r+fPnN2vsIpEI1tbWsLW1rXGfjx49ulpM/fr10/t9DqDG/3sej4cFCxbodJ8bGhpCJpPB2Ni41uOgPh1P6hNLnXT4oI5Wa8OGDUwoFLLVq1ezK1eusHHjxjEzMzOWmZnZLOuLiIhga9asYZcvX2bx8fHs5ZdfZi4uLqyoqEhdplevXmzcuHEsPT1dPeTn56unKxQK1qVLFxYeHs4uXLjAdu/ezaysrNj06dPVZW7dusWkUimbMmUKu3r1Kvvxxx+ZQCBge/fubfS2z5w5k3l7e2vE9eDBA/X0CRMmMGdnZ3bw4EF29uxZ9txzz7HQ0FCdx52VlaURc0xMDAPADh8+rHf7e/fu3ezzzz9nW7ZsYQDY1q1bNbZl/vz5zNTUlG3bto1dvHiRvfrqq8zNzY2Vlpaqy/Tr14/5+vqykydPsn///Zd16NCBDR8+XD09Pz+f2drashEjRrDLly+z9evXM4lEwlauXKkuExsbywQCAfv222/Z1atX2RdffMEMDQ1ZQkJCrbEEBwczU1NTtmHDhmqx5+XlsfDwcLZx40Z27do1FhcXx4KCglhgYKDG9rm6urI5c+ZofBaPfzeaI/Zly5YxDw8PZmNjU+M+HzVqFOvXr59GTDk5ORpl9HGfM8Y0Yk5PT2erV69mPB6PJScn63Sfh4aGMj8/P+bo6MhOnTpV43FQn44ndcVSH5SgGyEoKIhFRUWp3yuVSubg4MDmzZvXIuvPyspiANjRo0fV43r16sU++uijWufZvXs34/P5LCMjQz1u+fLlTCaTqZ+v++mnnzJvb2+N+YYNG8YiIiLU7xu67TNnzmS+vr41TsvLy2OGhoZs06ZN6nGJiYkMAIuLi9Np3E/66KOPWPv27dWPP9TX/f3kAVelUjE7Ozu2YMEC9bi8vDwmEonY+vXrGWOMXb16lQFgZ86cUZfZs2cP4/F47N69e4wxxn766Sdmbm6ujp0xxqZNm8Y8PT3V74cOHcoGDBigEU9wcDB777336hVLTcniSadPn2YAWGpqqnqcq6srW7x4ca3zNHfstSXoQYMG1RpTa9rngwYNYi+++KLGOF3v8/Xr11c7DurT8aQ+sdQHneJuoPLycpw7dw7h4eHqcXw+H+Hh4YiLi2uRGPLz8wEAFhYWGuPXrVsHKysrdOnSBdOnT0dJSYl6WlxcHHx8fGBra6seFxERgYKCAly5ckVd5vHtqixTuV2N3fYbN27AwcEB7u7uGDFiBNLS0gAA586dQ0VFhcbyOnXqBBcXF/XydBl3pfLycqxduxbvvvsueDyeery+7u/HpaSkICMjQ2MZpqamCA4O1tjHZmZm6Natm7pMeHg4+Hw+Tp06pS7Ts2dPCIVCjViTkpKQm5tbr+2pTyx1yc/PB4/Hg5mZmcb4+fPnw9LSEv7+/liwYIHGKcvmjr02R44cgY2NDTw9PfH+++8jOztbI6bWsM8zMzOxa9cujBkzpto0Xe7zuLi4asdBfTqe1CeW+qC+uBvo4cOHUCqVGh8wANja2uLatWvNvn6VSoXJkycjLCwMXbp0UY9/66234OrqCgcHB1y6dAnTpk1DUlIStmzZAgDIyMioMebKaU8rU1BQgNLSUuTm5jZ424ODgxEdHQ1PT0+kp6dj9uzZeP7553H58mVkZGRAKBRWO9ja2trWGVNzx/24bdu2IS8vD6NHj1aP09f9/aTKddW0jMfjsLGx0ZhuYGAACwsLjTJubm61bo+5uXmt2/P4MuqK5WnKysowbdo0DB8+XKM/7kmTJiEgIAAWFhY4ceIEpk+fjvT0dCxatKhFYq9Jv3798Nprr8HNzQ3Jycn473//i/79+yMuLg4CgaDV7PPffvsNJiYm1R5Tqut9np6eXu04qE/Hk/rEUh+UoFuZqKgoXL58GcePH9cYP378ePVrHx8f2Nvbo0+fPkhOTkb79u1bOky1yocjAEDXrl0RHBwMV1dX/PXXX5BIJDqLqyF+/fVX9O/fHw4ODupx+rq/26qKigoMHToUjDEsX75cY9qUKVPUr7t27QqhUIj33nsP8+bNg0gkaulQAQBvvvmm+rWPjw+6du2K9u3b48iRI+jTp49OYmqM1atXY8SIERrPDAd0v8/Pnz8PhUJR7TjY1tAp7gaysrKCQCCodjVeZmYm7OzsmnXdEydOxM6dO3H48OE6H7dXeert5s2bAAA7O7saY66c9rQyMpkMEolEK9tuZmaGjh074ubNm7Czs0N5eTny8vJqXZ6u405NTcWBAwfqfOqTvu7vynJPW4adnR2ysrI0pisUCuTk5Gjlc3h8el2x1KQyOaempiImJqbOp1kFBwdDoVDg9u3bLRJ7fbi7u8PKykrj/0Of9zkA/Pvvv0hKSqrXE89acp/HxsYiMzOz2nFQn44n9YmlPihBN5BQKERgYCAOHjyoHqdSqXDw4EGEhIQ0yzoZY5g4cSK2bt2KQ4cOVTt1VJP4
+HgAgL29PQAgJCQECQkJGgeFyoNd586d1WUe367KMpXbpY1tLyoqQnJyMuzt7REYGAhDQ0ON5SUlJSEtLU29PF3HvWbNGtjY2GDAgAFPLaev+9vNzQ12dnYayygoKMCpU6c09nFeXh7OnTunLnPo0CGoVCr1D4+QkBAcO3YMFRUVGrF6enrC3Ny8XttTn1ieVJmcb9y4gQMHDsDS0rLObY6Pjwefz1efQm7u2Ovj7t27yM7O1vj/0Nd9XunXX39FYGAgfH1969y+ltjnjDGMHz8e9+/fx5w5c6odB/XpeFKfWOql3peTEbUNGzYwkUjEoqOj2dWrV9n48eOZmZmZxpWB2vT+++8zU1NTduTIEY3bGkpKShhjjN28eZPNmTOHnT17lqWkpLDt27czd3d31rNnT/UyKm8v6Nu3L4uPj2d79+5l1tbWNd5eMHXqVJaYmMiWLVtW4+0FDdn2jz/+mB05coSlpKSw2NhYFh4ezqysrFhWVhZjjLsVwcXFhR06dIidPXuWhYSEsJCQEJ3HzRh3ZaaLiwubNm2axnh929+FhYXswoUL7MKFCwwAW7RoEbtw4YL6Suf58+czMzMztn37dnbp0iU2aNCgGm+z8vf3Z6dOnWLHjx9nHh4eGrf85OXlMVtbW/b222+zy5cvsw0bNjCpVFrtthkDAwO2cOFClpiYyGbOnFnjbTOPxzJgwADm4ODATp48WS328vJy9uqrrzInJycWHx+v8b9fecXtiRMn2OLFi1l8fDxLTk5ma9euZdbW1mzkyJHNGvvJkydZ7969mYODQ7W4CwsL2SeffMLi4uJYSkoKO3DgAAsICGAeHh6srKxMr/d5pfz8fCaVStny5cvZk3S1z4cOHcoMDAyYnZ0dS0lJqXYcZEy/jid1xVIflKAb6ccff2QuLi5MKBSyoKAgdvLkyWZbF4AahzVr1jDGGEtLS2M9e/ZkFhYWTCQSsQ4dOrCpU6dq3JfLGGO3b99m/fv3ZxKJhFlZWbGPP/6YVVRUaJQ5fPgw8/PzY0KhkLm7u6vX8biGbPuwYcOYvb09EwqFzNHRkQ0bNozdvHlTPb20tJR98MEHzNzcnEmlUjZ48GCWnp6u87gZY2zfvn0MAEtKStIYr2/7+/DhwzX+f4waNYoxxt2u8uWXXzJbW1smEolYnz59qm1TdnY2Gz58ODM2NmYymYy98847rLCwUKPMxYsXWY8ePZhIJGKOjo5s/vz51WL966+/WMeOHZlQKGTe3t5s165dGtOfjCUgIKDW2FNSUmr936+8H/3cuXPq+3rFYjHz8vJiX3/9tUYibI7YDQ0Na427pKSE9e3bl1lbWzNDQ0Pm6urKxo0bV+3HoD7u80orV65kEomE5eXlVVufrvZ5XcdBxvTreFKfWOrCY4yx+te3CSGEENISqA2aEEII0UOUoAkhhBA9RAmaEEII0UOUoAkhhBA9RAmaEEII0UOUoAkhhBA9RAm6keRyOWbNmgW5XK7rUBqstcbeWuMGKHZdaK1xA6039tYaN6CfsdN90I1UUFAAU1NT5Ofn19k3sL5prbG31rgBil0XWmvcQOuNvbXGDehn7FSDJoQQQvQQJWhCCCFEDz1zz4NWKBS4cOECbG1twec3/vdJYWEhAODevXsoKCjQVngtorXG3lrjBih2XWitcQOtN/bWGjfQMrGrVCpkZmbC398fBgZ1p99nrg36zJkzCAoK0nUYhBBCnlGnT59G9+7d6yz3zNWgbW1tAXA7qPLZrIQQQkhzS09PR1BQkDoP1eWZS9CVp7Xt7e3h5OSk42gIIYQ8a+rbvEoXiRFCCCF6iBI0IYQQoocoQRNCCCF66JlrgyaEkCcplUpUVFToOgzSyhkaGkIgEGhteZSgm+DUrWzklVYgrIMVjEW0KwlpbRhjyMjIQF5enq5DIW2EmZkZ7OzswOPxmrwsyipNMHH9BTwolGP3pOfR2UE/+m4lhNRfZXK2sbGBVCrVykGVPJsYYygpKUFWVhYAaOU2XkrQTfAWPwYCgwyUZ7sADj66DocQ0gBKpVKdnC0tLXUdDmkDJBIJACArKws2NjZNPt1NF4k1wRDFTkwy2AaWk6LrUAghDVTZ5iyVSnUcCWlLKv+ftHFNAyXoJpDzuQ+ivLR19TlLCKlCp7WJNmnz/4kSdBNUCIwAAMoSStCEEEK0ixJ0EygMuAStkhfqOBJCCGm8du3aYcmSJfUuf+TIEfB4vGa/+j06OhpmZmbNug59ptMEPW/ePHTv3h0mJiawsbFBZGQkkpKSnjpPdHQ0eDyexiAWi1soYk1KQ2MAgKqsSCfrJ4Q8W5489j05zJo1q1HLPXPmDMaPH1/v8qGhoUhPT4epqWmj1kfqR6dXcR89ehRRUVHo3r07FAoF/vvf/6Jv3764evUqjIyMap1PJpNpJHJdtSGphI9iLKcaNCGk+aWnp6tfb9y4ETNmzNA4FhobG6tfM8agVCrr9dxha2vrBsUhFAphZ2fXoHlIw+m0Br13716MHj0a3t7e8PX1RXR0NNLS0nDu3Lmnzsfj8WBnZ6ce6vvoLm1jQu7LwCunGjQhpPk9ftwzNTXVOBZeu3YNJiYm2LNnDwIDAyESiXD8+HEkJydj0KBBsLW1hbGxMbp3744DBw5oLPfJU9w8Hg+rVq3C4MGDIZVK4eHhgR07dqinP3mKu/JU9L59++Dl5QVjY2P069dP4weFQqHApEmTYGZmBktLS0ybNg2jRo1CZGRkg/bB8uXL0b59ewiFQnh6euKPP/5QT2OMYdasWXBxcYFIJIKDgwMmTZqknv7TTz/Bw8MDYrEYtra2GDJkSIPW3dL0qg06Pz8fAGBhYfHUckVFRXB1dYWzszMGDRqEK1eu1FpWLpejoKBAPRQWaq+2yxOZAAD4FcVaWyYhRDcYYygpV+hkYIxpbTs+++wzzJ8/H4mJiejatSuKiorw8ssv4+DBg7hw4QL69euHgQMHIi0t7anLmT17NoYOHYpLly7h5ZdfxogRI5CTk1Nr+ZKSEixcuBB//PEHjh07hrS0NHzyySfq6d988w3WrVuHNWvWIDY2FgUFBdi2bVuDtm3r1q346KOP8PHHH+Py5ct477338M477+Dw4cMAgL///huLFy/GypUrcePGDWzbtg0+PlwfFWfPnsWkSZMwZ84cJCUlYe/evejZs2eD1t/S9KajEpVKhcmTJyMsLAxdunSptZynpydWr16Nrl27Ij8/HwsXLkRoaCiuXLlS4/Od582bh9mzZzdLzPxHCdpAQQmakNautEKJzjP26WTdV+dEQCrUzuF4zpw5eOmll9TvLSws4Ovrq34/d+5cbN26FTt27MDEiRNrXc7o0aMxfPhwAMDXX3+NH374AadPn0a/fv1qLF9RUYEVK1agffv2AICJEydizpw56uk//vgjpk+fjsGDBwMAli5dit27dzdo2xYuXIjRo0fjgw8+AABMmTIFJ0+exMKFC/HCCy8gLS0NdnZ2CA8Ph6GhIVxcXBAUFAQASEtLg5GREV555RWYmJjA1dUV/v7+DVp/S9ObGnRUVBQuX76
MDRs2PLVcSEgIRo4cCT8/P/Tq1QtbtmyBtbU1Vq5cWWP56dOnIz8/Xz1cvXpVazELJFyCNqQETQjRE926ddN4X1RUhE8++QReXl4wMzODsbExEhMT66xBd+3aVf3ayMgIMplM3Y1lTaRSqTo5A1xXl5Xl8/PzkZmZqU6WACAQCBAYGNigbUtMTERYWJjGuLCwMCQmJgIA3njjDZSWlsLd3R3jxo3D1q1boVAoAAAvvfQSXF1d4e7ujrfffhvr1q1DSUlJg9bf0vSiBj1x4kTs3LkTx44dq7EW/DSGhobw9/fHzZs3a5wuEokgEonU7wsKtHfPsqGE639bqNTvD5kQUjeJoQBX50TobN3a8uQFtp988gliYmKwcOFCdOjQARKJBEOGDEF5eflTl2NoaKjxnsfjQaVSNai8Nk/d14ezszOSkpJw4MABxMTE4IMPPsCCBQtw9OhRmJiY4Pz58zhy5Aj279+PGTNmYNasWThz5oze3sql0xo0YwwTJ07E1q1bcejQIbi5uTV4GUqlEgkJCVrpmLyhhFIuQYtUlKAJae14PB6kQgOdDM15J0psbCxGjx6NwYMHw8fHB3Z2drh9+3azra8mpqamsLW1xZkzZ9TjlEolzp8/36DleHl5ITY2VmNcbGwsOnfurH4vkUgwcOBA/PDDDzhy5Aji4uKQkJAAADAwMEB4eDi+/fZbXLp0Cbdv38ahQ4easGXNS6c16KioKPz555/Yvn07TExMkJGRAYD7MCs7HR85ciQcHR0xb948AFz7ynPPPYcOHTogLy8PCxYsQGpqKsaOHdvi8RuaWCNZZY87fGt4tPjaCSGkbh4eHtiyZQsGDhwIHo+HL7/88qk14eby4YcfYt68eejQoQM6deqEH3/8Ebm5uQ36cTJ16lQMHToU/v7+CA8Pxz///IMtW7aor0qPjo6GUqlEcHAwpFIp1q5dC4lEAldXV+zcuRO3bt1Cz549YW5ujt27d0OlUsHT07O5NrnJdJqgly9fDgDo3bu3xvg1a9Zg9OjRALiGfT6/qqKfm5uLcePGISMjA+bm5ggMDMSJEyc0fkG1FKFLAJ4v/w4CPg83GaM+fQkhemfRokV49913ERoaCisrK0ybNk2rTX31NW3aNGRkZGDkyJEQCAQYP348IiIiGvTEp8jISHz//fdYuHAhPvroI7i5uWHNmjXqHGJmZob58+djypQpUCqV8PHxwT///ANLS0uYmZlhy5YtmDVrFsrKyuDh4YH169fD29u7mba46XispRsJdOzu3btwdnbGnTt3Gtze/aQiuQJdZnJXfV6b2w9iLbYjEUKaV1lZGVJSUuDm5qaz3gifZSqVCl5eXhg6dCjmzp2r63C05mn/Vw3NP3pxkVhrJTUUgMcDGAMKyxSUoAkhpBapqanYv38/evXqBblcjqVLlyIlJQVvvfWWrkPTW5Sgm4APFfYLp0GCUpTkHwNMHHUdEiGE6CU+n4/o6Gh88sknYIyhS5cuOHDgALy8vHQdmt6iBN0UfAFceBkQoQJJRXkAKEETQkhNnJ2dq12BTZ6OEnQT/Vc6C8m5SnzGN9d1KIQQQtoQStBNlGzkj/icPBQqqP2ZEEKI9uhNV5+tlbGI+41TLFfoOBJCCCFtCdWgmyhEcRqdBIngZ0tAbdCEEEK0hRJ0E71Y9A+8DE/h8EMPAC/qOhxCCCFtBJ3ibiKlgTEAQFWmvedME0IIIZSgm0glfPTkmPIi3QZCCCH11Lt3b0yePFn9vl27dliyZMlT5+HxeNi2bVuT162t5TzNrFmz4Ofn16zraAmUoJuICbkaNCVoQkhzGzhwIPr161fjtH///Rc8Hg+XLl1q8HLPnDmD8ePHNzU8DbUlyfT0dPTv31+r62qrKEE3EU9kAgAQUIImhDSzMWPGICYmBnfv3q02bc2aNejWrRu6du3a4OVaW1tDKpVqI8Q62dnZQSQStci6WjtK0E3EF3E1aIGiWMeREELauldeeQXW1taIjo7WGF9UVIRNmzZhzJgxyM7OxvDhw+Ho6AipVAofHx+sX7/+qct98hT3jRs30LNnT4jFYnTu3BkxMTHV5pk2bRo6duwIqVQKd3d3fPnll6ioqADAPfZx9uzZuHjxIng8Hng8njrmJ09xJyQk4MUXX4REIoGlpSXGjx+PoqKqCs/o0aMRGRmJhQsXwt7eHpaWloiKilKvqz5UKhXmzJkDJycniEQi+Pn5Ye/everp5eXlmDhxIuzt7SEWi+Hq6qp+xDFjDLNmzYKLiwtEIhEcHBwwadKkeq+7Kegq7iYSiGUAAANFiY4jIYRoRXkjfmwLRIDg0eFUqQCUcoDHBwwldS+38jqWejAwMMDIkSMRHR2Nzz//XP2I202bNkGpVGL48OEoKipCYGAgpk2bBplMhl27duHtt99G+/btERQUVOc6VCoVXnvtNdja2uLUqVPIz8/XaK+uZGJigujoaDg4OCAhIQHjxo2DiYkJPv30UwwbNgyXL1/G3r171c9qNjU1rbaM4uJiREREICQkBGfOnEFWVhbGjh2LiRMnavwIOXz4MOzt7XH48GHcvHkTw4YNg5+fH8aNG1ev/fb999/ju+++w8qVK+Hv74/Vq1fj1VdfxZUrV+Dh4YEffvgBO3bswF9//QUXFxfcuXMHd+7cAQD8/fffWLx4MTZs2ABvb29kZGTg4sWL9VpvU1GCbiIDCXeKW6ikGjQhbcLXDg2f541owHsw9/raP8Cm0YBrD+CdXVVllvgAJdnV552V36BVvfvuu1iwYAGOHj2qfg7ymjVr8Prrr8PU1BSmpqb45JNP1OU//PBD7Nu3D3/99Ve9EvSBAwdw7do17Nu3Dw4O3L74+uuvq7Ubf/HFF+rX7dq1wyeffIINGzbg008/hUQigbGxMQwMDGBnZ1fruv7880+UlZXh999/h5ER90Nl6dKlGDhwIL755hvY2toCAMzNzbF06VIIBAJ06tQJAwYMwMGDB+udoBcuXIhp06bhzTffBAB88803OHz4MJYsWYJly5YhLS0NHh4e6NGjB3g8HlxdXdXzpqWlwc7ODuHh4TA0NISLi0u99qM20CnuJhIacTVokYpq0ISQ5tepUyeEhoZi9erVAICbN2/i33//xZgxYwAASqUSc+fOhY+PDywsLGBsbIx9+/YhLS2tXstPTEyEs7OzOjkDQEhISLVyGzduRFhYGOzs7GBsbIwvvvii3ut4fF2+vr7q5AwAYWFhUKlUSEpKUo/z9vaGQFDVnbK9vT2ysrLqtY6CggLcv38fYWFhGuPDwsKQmJgIgDuNHh8fD09PT0yaNAn79+9Xl3vjjTdQWloKd3d3jBs3Dlu3boVC0TI9R1INuomEUu60jVhVquNICCFa8d/7DZ9H8NhFT50GcsvgPVH/mZzQtLgeM2bMGHz44YdYtmwZ1qxZg/bt26NXr14AgAULFuD777/HkiVL4OPjAyMjI0yePBnl5eVaW39cXBxGjBiB2bNnIyIiAqamptiwYQO+++47ra3jcYaGhhrveTweVCqV1pYfEBCAlJQU7NmzBwcOHMDQoU
MRHh6OzZs3w9nZGUlJSThw4ABiYmLwwQcfqM9gPBmXtlENuonERlyClqIEjDEdR0MIaTKhUcMHwWN1HYEBN+7x9uenLbcRhg4dCj6fjz///BO///473n33XXV7dGxsLAYNGoT//Oc/8PX1hbu7O65fv17vZXt5eeHOnTtIT09Xjzt58qRGmRMnTsDV1RWff/45unXrBg8PD6SmpmpurlAIpVJZ57ouXryI4uKqJsLY2Fjw+Xx4enrWO+ankclkcHBwqPaoy9jYWHTu3Fmj3LBhw/DLL79g48aN+Pvvv5GTkwMAkEgkGDhwIH744QccOXIEcXFxSEjQ3g+u2lANuokkxlyCNkYZSsqVMBLRLiWENC9jY2MMGzYM06dPR0FBAUaPHq2e5uHhgc2bN+PEiRMwNzfHokWLkJmZqZGMniY8PBwdO3bEqFGjsGDBAhQUFODzzz/XKOPh4YG0tDRs2LAB3bt3x65du7B161aNMu3atUNKSgri4+Ph5OQEExOTardXjRgxAjNnzsSoUaMwa9YsPHjwAB9++CHefvttdfuzNkydOhUzZ85E+/bt4efnhzVr1iA+Ph7r1q0DACxatAj29vbw9/cHn8/Hpk2bYGdnBzMzM0RHR0OpVCI4OBhSqRRr166FRCLRaKduLlSDbiKRsRkeMFNkMTMUldX/sn9CCGmKMWPGIDc3FxERERrtxV988QUCAgIQERGB3r17w87ODpGRkfVeLp/Px9atW1FaWoqgoCCMHTsWX331lUaZV199Ff/3f/+HiRMnws/PDydOnMCXX36pUeb1119Hv3798MILL8Da2rrGW72kUin27duHnJwcdO/eHUOGDEGfPn2wdOnShu2MOkyaNAlTpkzBxx9/DB8fH+zduxc7duyAh4cHAO6K9G+//RbdunVD9+7dcfv2bezevRt8Ph9mZmb45ZdfEBYWhq5du+LAgQP4559/YGlpqdUYa8Jjz9h52bt378LZ2Rl37tyBk5OTVpbpO3s/8ksrcGBKL3SwMdbKMgkhzausrAwpKSlwc3ODWCzWdTikjXja/1VD8w/VoLWAnglNCCFE2yhBa0Flgi6iBE0IIURLKEFrweyyedgtnA6WmajrUAghhLQROk3Q8+bNQ/fu3WFiYgIbGxtERkZq3Jxem02bNqFTp04Qi8Xw8fHB7t27WyDa2rko76AzPxXK4hp6CSKEEEIaQacJ+ujRo4iKisLJkycRExODiooK9O3bV+OeuCedOHECw4cPx5gxY3DhwgVERkYiMjISly9fbsHINW22/Qgjy6fhvshNZzEQQghpW3R60+7jTxMBuCeg2NjY4Ny5c+jZs2eN83z//ffo168fpk6dCgCYO3cuYmJisHTpUqxYsaLZY67JXfMgHEu+i2BV4zodIITojjZ7pCJEm/9PetWrRn4+12m8hYVFrWXi4uIwZcoUjXEREREajy9racYirrs3ukiMkNZDKBSCz+fj/v37sLa2hlAoVPfGRUhDMcZQXl6OBw8egM/nQygUNnmZepOgVSoVJk+ejLCwMHTp0qXWchkZGdV6mLG1tUVGRkaN5eVyOeRyufp9YWGhdgJ+jEdFIoYJ4iDLKQXQSevLJ4RoH5/Ph5ubG9LT03H/fiP63yakBlKpFC4uLuDzm96CrDcJOioqCpcvX8bx48e1utx58+Zh9uzZWl3mk3yzd2O44Wbsy6kAMKRZ10UI0R6hUAgXFxcoFIo6+40mpC4CgQAGBgZaOxOjFwl64sSJ2LlzJ44dO1Zn7yp2dnbIzMzUGJeZmVnrM0enT5+ucUr83r179e6Ttt6E3DOheRVF2l0uIaTZ8Xg8GBoaNvuTiQhpKJ1exc0Yw8SJE7F161YcOnQIbm51XwUdEhKCgwcPaoyLiYmp8XmlACASiSCTydSDiYmJVmJ/HF/Mde8pqKj96nNCCCGkIXRag46KisKff/6J7du3w8TERN2ObGpqComEe1TbyJEj4ejoiHnz5gEAPvroI/Tq1QvfffcdBgwYgA0bNuDs2bP4+eefdbYdfDGX9A0VlKAJIYRoh05r0MuXL0d+fj569+4Ne3t79bBx40Z1mbS0NI3nkoaGhuLPP//Ezz//DF9fX2zevBnbtm176oVlzc1AIgMAGCpLdBYDIYSQtkWnNej6PEjryJEj1ca98cYbeOONN5ohosYRPkrQIhUlaEIIIdpBfXFrgVDKJWgJJWhCCCFaQglaC8TGpgAACSuFUvVMPV6bEEJIM6EErQWVCdqIV0q9iRFCCNEKStBaUNkGbYwyFFOCJoQQogWUoLVB9KgNmleOotIyHQdDCCGkLaAErQ0iY/XL0qJ8HQZCCCGkrdCLrj5bPQMRiiFFKTNAaQl1VkIIIaTpqAatJe/YbUE3+Qrk8Gt/VCYhhBBSX5SgtcRIJABAz4QmhBCiHZSgtcRYzD0Jp6iMEjQhhJCmowStJUNyfsZG4RyYZcbpOhRCCCFtACVoLXGSJyOYfw3Covu6DoUQQkgbQAlaSy66jMLE8g9xVdxV16EQQghpAyhBa0mObQh2qkJwV2Wt61AIIYS0AZSgtcREzN1STl19EkII0QZK0FpiU3EP/fmnYFt4WdehEEIIaQMoQWuJS+YBLBd+jxcKd+o6FEIIIW0AJWgtMXz0RCuhskjHkRBCCGkLKEFriaGUS9AiZYmOIyGEENIWUILWEnWCZqU6joQQQkhbQAlaS8RGZgAAKStFuUKl22AIIYS0epSgtURsxNWgjXhldKsVIYSQJmtUgr5z5w7u3r2rfn/69GlMnjwZP//8s9YCa20MJKYAAGOU0hOtCCGENFmjEvRbb72Fw4cPAwAyMjLw0ksv4fTp0/j8888xZ84crQbYaoiMAQBGKENRWYWOgyGEENLaNSpBX758GUFBQQCAv/76C126dMGJEyewbt06REdH13s5x44dw8CBA+Hg4AAej4dt27Y9tfyRI0fA4/GqDRkZGY3ZDO0ScgnakKdEcUmxjoMhhBDS2jUqQVdUVEAkEgEADhw4gFdffRUA0KlTJ6Snp9d7OcXFxfD19cWyZcsatP6kpCSkp6erBxsbmwbN3yweJWgAKCvO12EghBBC2gKDxszk7e2NFStWYMCAAYiJicHcuXMBAPfv34elpWW9l9O/f3/079+/weu3sbGBmZlZg+drVnw+ynhiiFkZ5EWUoAkhhDRNo2rQ33zzDVauXInevXtj+PDh8PX1BQDs2LFDfeq7Ofn5+cHe3h4vvfQSYmNjm3199VXGlwIAKkoLdBwJIYSQ1q5RNejevXvj4cOHKCgogLm5uXr8+PHjIZVKtRbck+zt7bFixQp069YNcrkcq1atQu/evXHq1CkEBATUOI9cLodcLle/LywsbLb4yvlSlCvyUV5K3X0SQghpmkYl6NLSUjDG1Mk5NTUVW7duhZeXFyIiIrQa4OM8PT3h6empfh8aGork5GQsXrwYf/zxR43zzJs3D7Nnz262mB63tPNa/H7qPj4Se7TI+gghhLRdjTrFPWjQIPz+++8AgLy8PAQHB+O7775DZGQkli9frtUA6xIUFISbN
2/WOn369OnIz89XD1evXm22WCRiMQB6JjQhhJCma1SCPn/+PJ5//nkAwObNm2Fra4vU1FT8/vvv+OGHH7QaYF3i4+Nhb29f63SRSASZTKYeTExMmi0WExF3QoI6KiGEENJUjTrFXVJSok50+/fvx2uvvQY+n4/nnnsOqamp9V5OUVGRRu03JSUF8fHxsLCwgIuLC6ZPn4579+6pa+tLliyBm5sbvL29UVZWhlWrVuHQoUPYv39/YzZD6wKytuAXw31IfTgQQFddh0MIIaQVa1QNukOHDti2bRvu3LmDffv2oW/fvgCArKwsyGSyei/n7Nmz8Pf3h7+/PwBgypQp8Pf3x4wZMwAA6enpSEtLU5cvLy/Hxx9/DB8fH/Tq1QsXL17EgQMH0KdPn8ZshtbZltzAS4JzsCi5petQCCGEtHI8xhhr6EybN2/GW2+9BaVSiRdffBExMTEAuAuyjh07hj179mg9UG25e/cunJ2dcefOHTg5OWl12acOb8e2A0ehsPXDgkkjtbpsQgghrVtD80+jTnEPGTIEPXr0QHp6uvoeaADo06cPBg8e3JhFtgnlTqFYrzRAJ1XztXMTQgh5NjQqQQOAnZ0d7Ozs1E+1cnJyapFOSvSZMV0kRgghREsa1QatUqkwZ84cmJqawtXVFa6urjAzM8PcuXOhUqm0HWOrYabKQxg/Ae5lV3QdCiGEkFauUTXozz//HL/++ivmz5+PsLAwAMDx48cxa9YslJWV4auvvtJqkK2FWdZJrBPOwymlFxibCB6Pp+uQCCGEtFKNStC//fYbVq1apX6KFQB07doVjo6O+OCDD57ZBC0yMgUASFEKuUIFsaFAxxERQghprRp1ijsnJwedOnWqNr5Tp07IyclpclCtlVjKJWgjlFE7NCGEkCZpVIL29fXF0qVLq41funQpunZ9djvo4Iu5q7eNeWXU3SchhJAmadQp7m+//RYDBgzAgQMHEBISAgCIi4vDnTt3sHv3bq0G2KqIjAEARihFVhklaEIIIY3XqBp0r169cP36dQwePBh5eXnIy8vDa6+9hitXrtT6VKlngpCrQRvx5CgqK9dxMIQQQlqzRt8H7eDgUO1isIsXL+LXX3/Fzz//3OTAWqVHNWgAKCsuAGCtu1gIIYS0ao2qQZNaGIihAHfltrw4X8fBEEIIac0oQWsTjwc5XwoAkBcX6DgYQgghrRklaC2TC7gErSilBE0IIaTxGtQG/dprrz11el5eXlNiaRMqBFKgAlCUUYImhBDSeA1K0KampnVOHzny2X7MosLACACgKivUcSSEEEJaswYl6DVr1jRXHG2GypBL0IwSNCGEkCagNmgtiwtcBM+yaOzh99J1KIQQQloxStBa1tnNGXIIcS41FxXKZ/fRm4QQQpqGErSWdbaXwVRiiOJyJRLu0b3QhBBCGocStJbxb+zFCqNf8IbgCOKSs3UdDiGEkFaKErS2ZSUipGg/uvOScCL5oa6jIYQQ0kpRgtY2t554GPJf/KMKwdnbuSirUOo6IkIIIa0QJWhtc+oGy76f4ppRd8gVKlxIy9N1RIQQQlohStDNgMfjIbS9JQCGODrNTQghpBF0mqCPHTuGgQMHwsHBATweD9u2batzniNHjiAgIAAikQgdOnRAdHR0s8fZGINNr2OTcDbyEg/rOhRCCCGtkE4TdHFxMXx9fbFs2bJ6lU9JScGAAQPwwgsvID4+HpMnT8bYsWOxb9++Zo604bqVxKI7/zr6Zf+OYrlC1+EQQghpZRrU1ae29e/fH/379693+RUrVsDNzQ3fffcdAMDLywvHjx/H4sWLERER0VxhNopxn6moSFiHUP4VXDizH/49XtZ1SIQQQlqRVtUGHRcXh/DwcI1xERERiIuL01FET2HmjHPm3I8P09NLdBsLIYSQVqdVJeiMjAzY2tpqjLO1tUVBQQFKS0trnEcul6OgoEA9FBa23EMsCrpNQgUTwL3gFHDnTIutlxBCSOvXqhJ0Y8ybNw+mpqbqoXPnzi22bl+frtiifB4AUHF4foutlxBCSOvXqhK0nZ0dMjMzNcZlZmZCJpNBIpHUOM/06dORn5+vHq5evdoSoQIAbGVi/GP6JhSMD8NbB4B751ps3YQQQlq3VpWgQ0JCcPDgQY1xMTExCAkJqXUekUgEmUymHkxMTJo7TA3uHX2wTdWDe3NkPqCiJ1wRQgipm04TdFFREeLj4xEfHw+Au40qPj4eaWlpALja78iRI9XlJ0yYgFu3buHTTz/FtWvX8NNPP+Gvv/7C//3f/+ki/HoJbW+JZYpBUIIP3NgPrAgDLv8NqKgLUEIIIbXTaYI+e/Ys/P394e/vDwCYMmUK/P39MWPGDABAenq6OlkDgJubG3bt2oWYmBj4+vriu+++w6pVq/TuFqvHBbtZ4jbsMaNiNFRCEyDrKrD5XWBZMJB7W9fhEUII0VM6vQ+6d+/eYIzVOr2mXsJ69+6NCxcuNGNU2mVuJERnexnW3Q9HaPh4DCjZAZz8CWBKQOZUVbA4GzCy1F2ghBBC9EqraoNurbh+uYF/71QAvacBkxOAob8Dgke/jyrKgO99geU9gMIMHUZKCCFEX1CCbgGh7a0AAMdvPkR+SQUglgF2PlUFMi4BFSVASTZgZFM1/uJG4OZBLoETQgh5puj0FPezorubBQR8Hu7mlsJ3zn64WkrRxdEUXR1N4etshqB23cGfehPIuQXwH/1mUqmAfdO5pG0gAdr1ADqEAx4vAZbtdbtBhBBCmh2PPa0RuA26e/cunJ2dcefOHTg5OdU9g5asPJqMdafSkJZTUm3aS51t8dOIABgKHjuhUZYP7Pucq0EX3tecwdID6BjBDS4hgMCwmaMnhBDSVA3NP5SgW1heSTku3yvApXt5uHwvHwcSs1CuUGGAjz2+f9MPBoInWh0YA7ISgZsHuNu00uIA1WNPxxLJuFq1zxtA+z6AgbBlN4gQQki9UIKug64T9JOOJGVh/O/nUK5UIdLPAd8N9YOAz6t9hrJ8IPkwl6yv7wNKHlZNk1gAk84DEvPmD5wQQkiDNDT/0EViOtbb0wZL3/KHAZ+HbfH38d8tCVCpnvKbSWwKeEcCkT8Bn9xA6ch9qOg+ATC2BSzcNZPz6n7AqpeAgvSqcffOcRefZSdztXNCCCF6iS4S0wN9ve2w5E0/TFp/ARvP3oHQgI85g7zB49Veky6rUOLX4ylYdjgPUuFL2Djuv2gvKdYsdPfMo9PhjyXiixuB0yu512IzwDEAsPcDZA6AkRV3FbmRNfdaYg48JQZCCCHNhxK0nnilqwMqlCpM+esi/jiZCrlCiZEh7eDtINNI1IwxxFzNxP92JaovOCspV2LkmnPY/H4I7B9f6PANgELOnfquZOEGOAUB6ReBsjwg+RA31MTIGnANAzq/CnR5XevbTAghpHbUBq1nNpxOw2dbEtTvbUxE6O1pjRc8beBoLsGCfUn49wbX7mwrE+GjPh2x6t9buPWwGB42xvjrvRCYG9XjQjFlBZB5hTvlnXkFKM4Cih8CRY/+yvOrygaOBgZ+j8yCMvx7LR0D
FPsh8XgBsPKgGjYhhNQTXSRWB31P0ABw4GomNp69g9ibD1FSXv2hGkIBH2Ofd0PUCx1gJDLA3dwSDFkeh4yCMvi7mGHd2GBIhU08OVJRCty/ANw+DjgHI98+FIOWHodZziVsE81AmYEMmJoMsejRj4EH1wGpJSC1oKRNCCE1oARdh9aQoCvJFUqcScnF4aQsHE7Kwq0HxQj3ssWXr3jB1dJIo+z1zEK8sSIO+aUV6NnRGqtGdoPQQDvXAKpUDGN+O4PDSQ/QjZ+E/xNsxgOYYr7kE3wU7oE3Ap1gsNAdKM0FBCKuPdvUCTCxA6RWjxK3OfdXYgEYiLgFW3XkEjrAXZ1ekA5IzLj5CCGkjaEEXYfWlKCfVK5QPTXpnk/LxYhfTqG0QomBvg74fpgf+E+7ZaueFu1Pwg+HbkJkwMdf74XgRlYRFu9Pwr18rgtST0shdignQFT2sI4lPWHo70DnQdzrK1uBTaMBl1Dg3T1VZTb+h+tJzcEPcPAH7LoCIuMmbxMhbVpJDnenhtgUMHMGDCX1m0+lBMqLAHkRUF4MVBRzZ9MqSrguhytKAaUcEBpzP66tOwHGNnUvlwBoeP6hi8RakbpqxAEu5lj+nwCM/e0s/rl4H6nZxfisXyeEdrBq9Dr3XcnAD4duAgDmveYDX2cz+DqbYaCvPdadTMPSwzeRlF2OTrwf8HmEO8Z0FYNXmA7k3wMK04HSHK670pKcR0M2oKrgFm6oeRYAEnPN28QqyoCkPdyV6Al/PRrJ4w4KLs8BrqHcYNq6fmiRZ4xCDuSlATkp3IWZZi6ARXvuTommNgepVMCDa0B6PHctSdZVrmOjwnTNcgN/AAJHca9vHwfO/ArYeAG9Pq0qM9+FO5PVEAO/565RAbhmrv1fAG49gdCJjd0i8hhK0G1Mb08bfP+mPz7dfBGX7ubjrVWn0LOjNT7r1wmdHWQNWtbNrEJ8/NdFAMA7Ye3wWkBVIhQZCPBuDzcM7e6Mr3cn4s9Tafjf3ltIznHBnEHPwdC1gafXvQdzw+N4fO5K9PvxXHv4/Qtct6cPErnh3BqunKkL4BrCnTI3c+F6VtOHzlpK87gHoZSXcA9IEcmq/opMAL5At/GplNyPJkUpl0QqHv01ED6K1ZSLk7qSbZxdnwDX9wL5d6Fxq2MloQlg0Y7rv8CiPdfHvkMAYNv56cu9Hcv1KHjnFDfUllRNHAB5AVcjfrzZqCAduLIFKH5eM0HzHvvO8g24WrLQCDCUAoZi7q+BGBAIAXkh16Rl8th9I3dOAjf2cbXtxxP0xre5piuZ46PBgVtWRQmgeFQrryjlXvN4AHiAR1+u5g9wPzySD3Pf7c6vVi13xyTu/1Up5y56VcgfLa/ksVp/KfdDX1nODa/9DPgMeRTvGWDHh9yP/YFLqpZb9IBrjuPrvpsQStBt0ICu9gh2t8CPB29g3ak0HLv+AP/eeIBIP0dMeakjnC2kdS6joKwC4/84hyK5AsFuFvjvy141ljMWGeCryC5ob22M/+26ivWn03AnpwTLRgTAVNLEA7uBkEu2Hi9VjSvM5O7vTosDUk9wt4vlpwGX0qrKTDxblaD//Y679ztwNBDyATeurAA4vpg7NWdkzQ3GNtw8QmPu4NGYL2f+PeDSRi6m9Hgg93YdM/C4JD3lGmBszY2KmQHE/wmETgLCJnHjHt4E/hz6aJZHBzAej4tTYsbdz175V2RSdaANHF3Vxp+wGUjaDXTsD3R9gxuXkwIsDax7uwwkXG3PsgNg7QkET+Bu1yNV5IVcz35dXq+qFZfmAvl3uNeGRtw+E5txten8O0B5IZCRwA2Vuo8FBnzHvX5wHVj7OveZfnCiqsyOD4Gc5Kr3hlKu+ce2C1crtvXmzjKJZVxnRKW5mqe4HQOAiHnVP8MJx7nPWmTMJeGG1u7b9QD6fVP1vwxwp8kTdzRsOQAwYnNVgr57Ftj/OdCxn2aCvriBS84Noayoev3wOvdD/8lT9Ct6cD96LDtwd6o4BursjAAl6DbKyliE2YO64N0ebli4/zr+uXgfWy/cwz8X7+ONbs6IeqE9nMxrTtQpD4sxa8cV3HpQDHtTMZY9+SCPJ/B4PIzp4QZXCykmbbiA4zcfYsjyE1g9unu9fgw0iIkt4PUKNwBcW9nd09yv4dzb3IHv8VPeeXeAh0matYyC+8DxRU9fj6ERIJRW1SAMxMCQ1VUHtRNLgQtrgYC3gZAoblzJQ+DgbM3lmLlwv8bLCrjaTFnBYwcVxp2+f7wmXVYAFD/gagKVlOWaB+T68hpYlaAzrwCX/+ZiqUzQhmLur0DEvTaQcBfwKeRcrBWPHuyiKOX2a/4d4NZhIGBU1TrOrAIu/QX4Dge6vcONKy/h4lUpuFOwTMklCiMr7rOpvEhQHxU/5E4Z59/lPnORSdUZD5Ex91mU5nG1Uree3DyKcuAHf+5zM3MBnIO48T0mA0Hjuf8ZI2vNhKeQA7mp3BPscpK5v9nJXA26Eo/P/fgUmWrG2GkAl+RdngOcg7lH19Z2loPHq/ofqGTZvurH6uOa2lRk4Q48N+HJAIBXl3LfuYJ7j4b73P+3odGjmrmk6n8PAJiK21+VrDwAn6GAva/mol/8gts+gZDbfoGQW5ah9LG/j767BkKAb8j9kK3UMQL4z9/c/3+l8uKqZrjMBG4ozNBZgqaLxJ4RCXfz8e2+a+p7qA34PI1EXSRXYNel+9h09i7OpuYC4Nq8N70XAl9ns3qv5/K9fIz57QwyC+SwNBJi8/uhcLMyqnvG5pJ7mzuYmTpxBxCAe3/iR+6Aqr73+wFX06jpVGSlSReqlnHof8CxBUC3d4FXFnPjFOXAtgnchWz2vtzw5MEReJQAi7gExpSAsV1Vjb3gUbt9Za0e4A4a6ZeqYmOMe11ezCWLsjwu9tI8riZXqc+XVac2005yNRGnbtyBvXI5jNV+tkBZwS1PXsAdpB5eBx4kAX1mVB1Mt0UB8WuB3tOB3p9x4+6dA355sfb9aGwLmDo/+kzcAJvOXM3PulPLnE5njDu9L3hUP0mNAw7N5RJzSXY9F8IDZuRU7but73M/FPt9A3iEayfOijLuh5VQyu0f0jKUCiAvlft/f3iD+y76DdfKoukq7jo8qwm60pnbOfj+wA0cv8klakMBD8+5W+Ls7VyUVnD3XPN5QM+O1ni/V3sEu1s2eB0Z+WV4J/oMEtMLENTOAhvGP6eVq8mbHWPcL/vyYq6GVF7MJVLFo3Yst57cwRLgvrj5d7hHf1aeinsWPbzJnc636VzVdpp5BfhjMMATcG2ZlUmsMJPbl7WZFF91huL0L8Cd04DvMO456ABX44z9njurUTkYSrmkrl6XgBuUFVVXIwdPqHrK255pXBNCxFdAwEhuXGocsKbfoyB4gLkrYObK1ZblRdwPFHkhNwiEVc0JY2O4GABumtCY+gAgT0VXcZOn6t7OAmvHBmsk6spatbu1EYYEOuE1fyfYmYobvQ47UzF+fjsQEUuO4fTtHKw7nYa3n3PV1iY0Hx7v0amxR22uT2PlwQ3POqsO3PA4W2/gk+vVyzLGXZSWn8a
dQs67A2Tf5K48zk3lkmKl1BPchUyOgVUJOv8ucPbXhsfoO/yxdlEel3AfJGnG+9ovXPu6pUfVj7CGEJk0fB5C6kA16Gfcmds5OJ2Sg+fcLRHgYvbUB3Q01JrYFMz+5yqMhALsn9ILjmb1vBcTXC1856X7eN7DGp52dPB75lzfx52laNeDuwce4Jor4v98dIajmGsjLy/mTldXNhdUtnvzBVzSFBoDEV8DRo/OBOWkcGdJLNz1uy2ctEl0irsOlKBbjlLFMHRlHM6l5qK3pzXWjO5e5w+Am1mFWHn0FrbF30OFksFcaogdE3to/2IzQghpYfQ8aKI3BHwevnndB0IBH0eSHmB7/P1ay569nYOxv51F+KJj2HTuLiqUDCYiA+SWVGDc72dRUq5owcgJIUT3qA2aNKsONiaY1KcDFu6/jtn/XEEPDytYGXOnFlUqhoPXsrDiaDLOPbpynMcD+na2xXu92sPeVIyBP8biWkYhPtl0EcveCqixBp5XUo7N5+42+nS4SsWw7nQartzLh52pGA5mEjiaSeBgJoG9qRhiQx13KEIIeSbpRYJetmwZFixYgIyMDPj6+uLHH39EUFBQjWWjo6PxzjvvaIwTiUQoKyursTzRvfd6tcfOS+m4llGI2f9cxXdv+GLHxftYeTQZN7KKAHBP6HotwBHjerqjvXVVX9sr/hOA4b+cxO6EDCw7fBMTX9S8MOtwUhambb6ErEI5RAZJmP+6Dwb717/pIr+0AlM2xuPgtawapwv4PLwe4Ihp/TrB0pjaLAkhLUfnCXrjxo2YMmUKVqxYgeDgYCxZsgQRERFISkqCjU3NnbDLZDIkJVVdhanNC5uI9hkK+FgwxBeDlh3HPxfvIy45Gw+LuM46TEQGGPGcK94NawcbWfUrx7u1s8CcQV0wfUsCFu6/Dk87GV7qbItiuQL/25WI9ae5HsSMhAIUlyvxfxsv4uKdfHw+wOupnasAwLWMArz3xzmkZpdAaMDHyOdcUVyuwL28MtzPK8W93FKUVijx19m72HclE1MjPDE8yAWC1nDLGCGk1dP5RWLBwcHo3r07li5dCgBQqVRwdnbGhx9+iM8++6xa+ejoaEyePBl5eXmNWh9dJKY78/YkYuXRWwC4ns7G9HDDiOdcIBPX3TnFl9su44+TqTAWGWDWq9744eANpOVwPV29E9YOn/T1xMqjyeoHewS1s8DSEf6wMan5drHt8ffw2d8JKK1QwtFMghX/CYSPk2aPTYwxnEvNxZfbryAxvQAA0NXJFP+L7IKuTmaN3Q2EkGdUq7qKu7y8HFKpFJs3b0ZkZKR6/KhRo5CXl4ft27dXmyc6Ohpjx46Fo6MjVCoVAgIC8PXXX8Pb27te66QErTtlFUqsOJoMO5kYkf6ODWrbrVCqMGLVKZxOyVGPczSTYMEbXRHavuqe5f1XMjDlr4sokitgKxPhf5E+MJUYolyhglyhRLlChZO3svFbXCoA4HkPK3z/pj8sjIS1rluhVOGPk6lYtP86CuUK8HjAqJB29aqlE0JIpVbVUcnDhw+hVCpha2urMd7W1hbXrl2rcR5PT0+sXr0aXbt2RX5+PhYuXIjQ0FBcuXKlxg2Wy+WQy6s6VC8sLKxWhrQMsaEAk8M7NmpeQwEfy0cEYNCyWNzNLcXQbk748pXOMHmi9t3X2w7bJxrjvT/O4WZWEcb9frbWZX7Quz0+7utZ5ylrAwEf74S5YUBXe8zbfQ1bL9xD9InbuJtbgqVvBdBFZISQZqHzNuiGCgkJQUhIiPp9aGgovLy8sHLlSsydO7da+Xnz5mH27NnVxpPWx9JYhF2TnkdmQRk62tZ+tXZ7a2NsiwrD7B1XEHcrG0IBH0IDPkQG3F+p0AD/ec4VL3W2rXUZNbExEWPxMD/072KHD9dfwIHELIxafRqrRnWr9kOBEEKaSqcJ2srKCgKBAJmZmRrjMzMzYWdnV8tcmgwNDeHv74+bN2/WOH369OmYMmWK+v29e/fQuXMdz1slestUYlivx1gaiwyw4A3fOss1Rl9vO/z+bhDG/nYWp1JyMPyXk4h+J0h9+xghhGiDThO0UChEYGAgDh48qG6DVqlUOHjwICZOrN/jvZRKJRISEvDyyy/XOF0kEkEkqjpwFhQUNDluQoLdLbF+/HMYtfo0Lt8rwNAVcfhjbDAczSTIL63AlXv5SLiXj8v3C1AsV8BYZAAjkQGMRQIYiQxgaSTEgK4OT237JoQ823R+invKlCkYNWoUunXrhqCgICxZsgTFxcXqe51HjhwJR0dHzJs3DwAwZ84cPPfcc+jQoQPy8vKwYMECpKamYuzYsbrcDPIM6uJoik0TQvD2r6dx62ExBi2NhbFIgNvZJfWa/9u9SZjQuz3eCWsHqVDnX0VCiJ7R+VFh2LBhePDgAWbMmIGMjAz4+flh79696gvH0tLSwH/sebW5ubkYN24cMjIyYG5ujsDAQJw4cYJOWxOdcLc2xub3Q/CfVaeQ/KAYD7l+V+BkLoGPoym6OJrCyliIIrkSxXIFiuUKFMkVOJeai2sZhViwLwm/nbiNyeEdMbSbEwya4arwsgolCsoqUKFkKFeoUKFUoVyhgrHIAO1a4FndjDHczS2Fo5mkdTx2lBA9ofP7oFsa3WZFmkNeSTl2XkqHi4UUPo6mMK/j1LVKxfDPpftYsC8Jd3O5ZyS7Wxsh3MsWRXIFCssUKCyrQGGZAgIeD28GOWOQn2O9O0lhjOFsai7Wn0rDroR0yBWqGssFu1lgQq/26O1prdUOf0rKFYi9mY1D1zJx6FoWMgvkeN7DCivfDqSzBeSZ1arug9YFStBEn8gVSvx5Kg0/HrqJnOLyp5b1sDHGx309EeFtW2syzS+pwJYLd7H+dBquZxZpTBMa8CES8GFowIdQwMfDIjkUKu7r72lrgvE93THQ1wFCg8bX4vddycD602k4kZyN8hp+FHRvZ47Vo7u36FXviekF+PvcXVy8m4dP+3VC93YWLbZuQh5HCboOlKCJPiosq8Cfp9KQVSiHidgAJmJD7q/IALceFuPnY7eQX1oBgOvNbGqEJ7wdTHE9sxA3MgtxPbMI1zMLEX8nT11bFhvy8aqvA4YHucDPufqzvtPzS7Em9jb+PJWGIjn3tDA7mRgf9+2IIYFODapRF5RVYOb2K9h64Z56nJO5BH062eBFL1sIBXyM/+MsCssU6Opkit/fDYKZtPkukMsqLMOO+Pv4+/w9dS9wAOjxpUSnKEHXgRI0aY3ySyuw6t9b+PV4CkrKlU8t28nOBG8FuyDS37Fe3ajml3I/DlbHpuBBIdepzwAfe3w92Aem0rrnP3krGx//dRH38krB5wFjn3fHkEAneNgYayT5y/fy8favp5BbUoFOdib4Y0wwrE2afmuaSsWQkl2My/fycflePi7dzcfZ1FwoH50dEAr46ONlg7ScEly5XwBPWxP8/UEojEV0qp20LErQdaAETVqzh0Vy/HQ4GWtPpqJcqYKTuQQdbU0eDcbwspehk51Jo9qT5QolVv2bgsUx16FQMTiaSbDkTb9aTwnLFUos2n8dP/
97C4wBrpZSLBrqh0BX81rXcT2zECNWncKDQjncrYywblww7E0lDY6VMYZdCen4PS4VV+7lo7iGHy0BLmZ4LcAJr3S1h5lUiPT8Ury6NBYPCuXo29kWK/4TWONFa1kFZTiVkoM+XjbUXk60ihJ0HShBk7agpFwBxgCjZqgFXryTh0kbLiA1uwR8HvDhix748MUOUDHg1sMiJKYXIDG9EIevZakfF/pmd2d8+UrnesVz+2ExRqw6hXt5pTARGaCjnQnaWRqhnaUU7ayM4GZlBE87k1r7OT9x8yHm772GS3fz1ePEhnx0tpehy6Mr54PaWdR4hfr5tFy8ufIkypUqTOrjgSkvVXU9W6FUITr2NpYcuI7iciXaWUrxXR0/OBhjkCtU1N0rqRdK0HWgBE1I3YrkCszYfhlbznNtyjYmIuSWlKNCqXm4sDASYv5rPujrXb+e/yrdyyvFyF+5W9NqIhUK0K2dBULcLfGcuwV8HE1xPbMI8/dew7HrDwBwjxgd19MdA3zs4W5tXO8r3DedvYOpmy8BAJaPCEB/H3vEJWdjxvbLGs8nL1eqwOdxzzOfHO4BkUFVEpYrlNgRfx+rY2/jemYh3n7OFR/37UhdvpKnogRdB0rQhNTf9vh7+GLrZRQ+uojMRGSATvYm8LKXwctehr6dbWHZyC5OK5QqJGUUIuVhMVKzi5HysAS3s4txM6tIfUFcJSOhACUVSjAGGAp4GBHsiokvdmh096pzd17Fr8dTIDEUoGdHK+y7wnU3bGEkxLR+nojwtsOcf65iy6OL3jrZmeC7ob6wlYmx9mQq1p5MxcMizavubUxEmDGwMwb42NMz6kmNKEHXgRI0IQ3zoFCOq+kFcLcygpO5pNmTj0rFkJRZiJO3shGXnI1TKTnqhP2qrwM+7tsRrpZN62BFoVThnegz+PfGQwAAnweMCOZqwY9fXb73cgY+35qA7OJyGPB54PN56tvH7E3FGBXaDu2tjfH17kSkPOTOBvTsaI25g7ybHGNzu59XirjkbMTdysbFO3lQMQaRgUDjwTImYgNYG4tgbVI12JiI0dHWpEm34z2rKEHXgRI0Ia1LZcKWCgVaTXr5JRUY/8dZGAh4mN7fC10cTWss97BIjs+3Jqhr2X7OZhjTww39utip28krn3X+0+FklCtVEBnw0dvTGhZGQphJhTCTGMJcKoStqRjBbhZPbbNmjOHyvQLcfFAIO5kETuYS2JuKq/Uyp1Cq8LCoHJkFZcgqlCOnWI6HReXILipHTrEcOSUVMOTzYCQygJFIAKmQ6w/+QWEZ4pKz690lbU3EhnwEupoj2M0Sz7lbwtfZVKMJgDGG0golisoUyCkpR05RObKLy5FTzP2tUKpgKOBDKODBQMCHoYAPE5EB+nrbNuvtd7pGCboOlKAJIQ3FGEPszWwYiw3g52xWa7lbD4owY/sVHL/5sNYyJiIDvORti4G+DujRwUqd5NOyS7At/h62xd/DrSfa5vk8wN5UAgczMUrKlcgskCO7WI6mHL35PMDH0RQh7a0Q7GYBiVCAcoUKcoXq0V8lCkor8KBIjgeFj4YiOe7mliKvRLMJQmTAh72pGMXlSpTIFermiIYyEgrwdkg7jH3erU0+HY4SdB0oQRNCmlNlMr/1sAh5JRXILSlH/qO/1zIKkZ5fpi5rJjVEuJctbj0owvm0PPV4kQEfvk5meFgkx9280hp7ZQMAAZ8Hm0ennq2MRbAwEsLSWAgrIxHMpIZQqhiKyx/1A1+uQIlcCYlQgGA3C3R3s6jXffI1bd/NrCKcvJWNk7dycColu1p7fCUeDzCXCmFhxA2Wj/4KDfhQKBkqlCpUPPqblFGIpMxCAFwNfUSwK8b3dIeNiQhpOSU4l5qrHu7llsLX2QxhHazQo4MVOjvI6n2RYG3KFSrczyuFQsWgYtygVDEwxsXTwab2Z9DXFyXoOlCCJoToikrFcD4tF/9cvI9dCRl4WCRXT+PzgLAOVoj0c0REFzt1RyoqFcPDIjnu5Jbifl4pjEQC2JiIYSsTw9JIqPMHkDDGkPygGLkl5TASPn46XQCxgaDe8THGcCAxCz8euqG+hU5owIdMbFDrD4BKZlJDhLhbop2VEYQCrv3cUMCDUMCHRCiArUwMe1MJ7M3EMBEZgMfjoaxCifg7eTj16EfG+bRclFXU/EPIz9kM26LCGrZjakAJug6UoAkh+kCpYjh1KxuHk7JgZyrBwK72sJGJdR2WzjHGcOzGQ/x48AbOpuYC4G576+IoQ6CrOQJdzeFoJsXZ1BzE3szGyVvZ6q5q68PoUcKu6cyExJC7SE7A54HPA/g8HgR8HrwdZFg1qnuTt40SdB0oQRNCiP5jjOHK/QLIFSp0cZRpXIT2OIVShYt383HyVjayi8pRrlSiQsGdNpcrVSiWK5CRX4aMgrJqbefWJiIEu1kg2N0Sz7lZoMMT3dNqW0PzD/VjRwghRO/weLxar6x/nIGAr65Z16WkvCpZ28nEcLMy0ut71ilBE0IIeSZIhQZwtzaGu7WxrkOpF7rTnBBCCNFDlKAJIYQQPUQJmhBCCNFDlKAJIYQQPUQJmhBCCNFDz9xV3CoVd2N6enq6jiMhhBDyLKnMO5V5qC7PXILOzOSeSBMUFKTjSAghhDyLMjMz4eLiUme5Z64nMYVCgQsXLsDW1hZ8ftPO8BcWFqJz5864evUqTEya3pF6a0LbTttO2/7soG3XzrarVCpkZmbC398fBgZ114+fuQStTQUFBTA1NUV+fj5kMpmuw2lRtO207bTtzw7adt1sO10kRgghhOghStCEEEKIHqIE3QQikQgzZ86ESCTSdSgtjradtv1ZQ9tO297SqA2aEEII0UNUgyaEEEL0ECVoQgghRA9RgiaEEEL0ECXoOixbtgzt2rWDWCxGcHAwTp8+/dTymzZtQqdOnSAWi+Hj44Pdu3e3UKTa15Btj46OBo/H0xjEYnELRqs9x44dw8CBA+Hg4AAej4dt27bVOc+RI0cQEBAAkUiEDh06IDo6utnjbA4N3fYjR45U+9x5PB4yMjJaJmAtmTdvHrp37w4TExPY2NggMjISSUlJdc7XFr7vjdn2tvJ9X758Obp27QqZTAaZTIaQkBDs2bPnqfO05GdOCfopNm7ciClTpmDmzJk4f/48fH19ERERgaysrBrLnzhxAsOHD8eYMWNw4cIFREZGIjIyEpcvX27hyJuuodsOADKZDOnp6eohNTW1BSPWnuLiYvj6+mLZsmX1Kp+SkoIBAwbghRdeQHx8PCZPnoyxY8di3759zRyp9jV02yslJSVpfPY2NjbNFGHzOHr0KKKionDy5EnExMSgoqICffv2RXFxca3ztJXve2O2HWgb33cnJyfMnz8f586dw9mzZ/Hiiy9i0KBBuHLlSo3lW/wzZ6RWQUFBLCoqSv1eqVQyBwcHNm/evBrLDx06lA0YMEBjXHBwMHvvvfeaNc7m0NBtX7NmDTM1NW2h6FoOALZ169anlvn000+Zt7e3xrhhw4axiIiIZoys+dVn2w8fPswAsNzc3BaJqaVkZWUxAOzo0
aO1lmlL3/fH1Wfb2+r3nTHGzM3N2apVq2qc1tKfOdWga1FeXo5z584hPDxcPY7P5yM8PBxxcXE1zhMXF6dRHgAiIiJqLa+vGrPtAFBUVARXV1c4Ozs/9VdoW9NWPvem8PPzg729PV566SXExsbqOpwmy8/PBwBYWFjUWqatfu712Xag7X3flUolNmzYgOLiYoSEhNRYpqU/c0rQtXj48CGUSiVsbW01xtva2tbavpaRkdGg8vqqMdvu6emJ1atXY/v27Vi7di1UKhVCQ0Nx9+7dlghZp2r73AsKClBaWqqjqFqGvb09VqxYgb///ht///03nJ2d0bt3b5w/f17XoTWaSqXC5MmTERYWhi5dutRarq183x9X321vS9/3hIQEGBsbQyQSYcKECdi6dSs6d+5cY9mW/syfucdNkuYREhKi8aszNDQUXl5eWLlyJebOnavDyEhz8vT0hKenp/p9aGgokpOTsXjxYvzxxx86jKzxoqKicPnyZRw/flzXobS4+m57W/q+e3p6Ij4+Hvn5+di8eTNGjRqFo0eP1pqkWxLVoGthZWUFgUCgfn50pczMTNjZ2dU4j52dXYPK66vGbPuTDA0N4e/vj5s3bzZHiHqlts9dJpNBIpHoKCrdCQoKarWf+8SJE7Fz504cPnwYTk5OTy3bVr7vlRqy7U9qzd93oVCIDh06IDAwEPPmzYOvry++//77Gsu29GdOCboWQqEQgYGBOHjwoHqcSqXCwYMHa22fCAkJ0SgPADExMbWW11eN2fYnKZVKJCQkwN7evrnC1Btt5XPXlvj4+Fb3uTPGMHHiRGzduhWHDh2Cm5tbnfO0lc+9Mdv+pLb0fVepVJDL5TVOa/HPvFkuPWsjNmzYwEQiEYuOjmZXr15l48ePZ2ZmZiwjI4Mxxtjbb7/NPvvsM3X52NhYZmBgwBYuXMgSExPZzJkzmaGhIUtISNDVJjRaQ7d99uzZbN++fSw5OZmdO3eOvfnmm0wsFrMrV67oahMarbCwkF24cIFduHCBAWCLFi1iFy5cYKmpqYwxxj777DP29ttvq8vfunWLSaVSNnXqVJaYmMiWLVvGBAIB27t3r642odEauu2LFy9m27ZtYzdu3GAJCQnso48+Ynw+nx04cEBXm9Ao77//PjM1NWVHjhxh6enp6qGkpERdpq1+3xuz7W3l+/7ZZ5+xo0ePspSUFHbp0iX22WefMR6Px/bv388Y0/1nTgm6Dj/++CNzcXFhQqGQBQUFsZMnT6qn9erVi40aNUqj/F9//cU6duzIhEIh8/b2Zrt27WrhiLWnIds+efJkdVlbW1v28ssvs/Pnz+sg6qarvHXoyaFye0eNGsV69epVbR4/Pz8mFAqZu7s7W7NmTYvHrQ0N3fZvvvmGtW/fnonFYmZhYcF69+7NDh06pJvgm6CmbQag8Tm21e97Y7a9rXzf3333Xebq6sqEQiGztrZmffr0USdnxnT/mdPTrAghhBA9RG3QhBBCiB6iBE0IIYToIUrQhBBCiB6iBE0IIYToIUrQhBBCiB6iBE0IIYToIUrQhBBCiB6iBE0IIYToIUrQhJBmwePxsG3bNl2HQUirRQmakDZo9OjR4PF41YZ+/frpOjRCSD3R86AJaaP69euHNWvWaIwTiUQ6ioYQ0lBUgyakjRKJRLCzs9MYzM3NAXCnn5cvX47+/ftDIpHA3d0dmzdv1pg/ISEBL774IiQSCSwtLTF+/HgUFRVplFm9ejW8vb0hEolgb2+PiRMnakx/+PAhBg8eDKlUCg8PD+zYsUM9LTc3FyNGjIC1tTUkEgk8PDyq/aAg5FlGCZqQZ9SXX36J119/HRcvXsSIESPw5ptvIjExEQBQXFyMiIgImJub48yZM9i0aRMOHDigkYCXL1+OqKgojB8/HgkJCdixYwc6dOigsY7Zs2dj6NChuHTpEl5++WWMGDECOTk56vVfvXoVe/bsQWJiIpYvXw4rK6uW2wGE6Ltme04WIURnRo0axQQCATMyMtIYvvrqK8YY94jBCRMmaMwTHBzM3n//fcYYYz///DMzNzdnRUVF6um7du1ifD5f/UxwBwcH9vnnn9caAwD2xRdfqN8XFRUxAGzPnj2MMcYGDhzI3nnnHe1sMCFtELVBE9JGvfDCC1i+fLnGOAsLC/XrkJAQjWkhISGIj48HACQmJsLX1xdGRkbq6WFhYVCpVEhKSgKPx8P9+/fRp0+fp8bQtWtX9WsjIyPIZDJkZWUBAN5//328/vrrOH/+PPr27YvIyEiEhoY2alsJaYsoQRPSRhkZGVU75awtEomkXuUMDQ013vN4PKhUKgBA//79kZqait27dyMmJgZ9+vRBVFQUFi5cqPV4CWmNqA2akGfUyZMnq7338vICAHh5eeHixYsoLi5WT4+NjQWfz4enpydMTEzQrl07HDx4sEkxWFtbY9SoUVi7di2WLFmCn3/+uUnLI6QtoRo0IW2UXC5HRkaGxjgDAwP1hVibNm1Ct27d0KNHD6xbtw6nT5/Gr7/+CgAYMWIEZs6ciVGjRmHWrFl48OABPvzwQ7z99tuwtbUFAMyaNQsTJkyAjY0N+vfvj8LCQsTGxuLDDz+sV3wzZsxAYGAgvL29IZfLsXPnTvUPBEIIJWhC2qy9e/fC3t5eY5ynpyeuXbsGgLvCesOGDfjggw9gb2+P9evXo3PnzgAAqVSKffv24aOPPkL37t0hlUrx+uuvY9GiRepljRo1CmVlZVi8eDE++eQTWFlZYciQIfWOTygUYvr06bh9+zYkEgmef/55bNiwQQtbTkjbwGOMMV0HQQhpWTweD1u3bkVkZKSuQyGE1ILaoAkhhBA9RAmaEEII0UPUBk3IM4hatgjRf1SDJoQQQvQQJWhCCCFED1GCJoQQQvQQJWhCCCFED1GCJoQQQvQQJWhCCCFED1GCJoQQQvQQJWhCCCFED1GCJoQQQvTQ/wNggCsK1auBggAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAeoAAAEiCAYAAAA21pHjAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABh+ElEQVR4nO3dd1gUV9sH4N/usrvsAkvvHUVAREQFA2jUiD0qmqgxRjGxvEkw6muixs/E+iaaaIwpxpIiKcZeY2zYE8QOKorYaCJFpXfYPd8fI4MriJSFBXzu65qL3ZkzM88Z1312zpyZI2CMMRBCCCGkWRJqOwBCCCGEPBslakIIIaQZo0RNCCGENGOUqAkhhJBmjBI1IYQQ0oxRoiaEEEKaMUrUhBBCSDNGiZoQQghpxihRE0IIIc0YJWpCWpGEhAQIBAJER0drOxRCiIZQoiakmREIBDVOCxcu1HaIhJAmpKPtAAgh6lJTU/nXW7Zswfz58xEXF8fP09fX10ZYhBAtoTNqQpoZKysrfjI0NIRAIODfW1hYYOXKlbCzs4NUKkWnTp1w8ODBZ25LqVTinXfegbu7O5KSkgAAe/bsQefOnaGrqwsXFxcsWrQI5eXl/DoCgQA//fQThg8fDrlcDldXV+zdu5dfnpWVhbFjx8Lc3BwymQyurq7YsGHDM2PYvn07vLy8IJPJYGpqiqCgIBQUFPDLf/rpJ3h4eEBXVxfu7u744Ycf1NZPTk7GqFGjYGRkBBMTEwwbNgwJCQn88gkTJiA4OBgrVqyAtbU1TE1NERoairKyslofc0KaNUYIabY2bNjADA0N+fcrV65kCoWCbdq0id24cYPNnj2bicVidvPmTcYYY/Hx8QwAi4qKYsXFxWz48OHMx8eHZWRkMMYYO3XqFFMoFCwsLIzduXOHHT58mDk5ObGFCxfy+wDA7Ozs2J9//slu3brFpk2bxvT19dmjR48YY4yFhoayTp06sfPnz7P4+HgWHh7O9u7dW2389+/fZzo6OmzlypUsPj6eXblyha1evZrl5eUxxhj7448/mLW1NduxYwe7e/cu27FjBzMxMWFhYWGMMcZKS0uZh4cHe+edd9iVK1fY9evX2Ztvvsnc3NxYSUkJY4yxkJAQplAo2LvvvstiY2PZX3/9xeRyOVu/fr1m/zEI0RJK1IQ0Y08nahsbG/bZZ5+plfH19WXvv/8+Y6wyUf/zzz+sT58+rHv37iw7O5sv26dPH/b555+rrf/7778za2tr/j0A9sknn/Dv8/PzGQB24MABxhhjQ4YMYW+//Xat4r948SIDwBISEqpd3qZNG/bnn3+qzVuyZAnz9/fnY3Nzc2MqlYpfXlJSwmQyGTt06BBjjEvUjo6OrLy8nC8zcuRINnr06FrFSEhzR9eoCWkhcnNzcf/+fQQGBqrNDwwMxOXLl9XmjRkzBnZ2djh27BhkMhk///Lly4iIiMBnn33Gz1MqlSguLkZhYSHkcjkAoGPHjvxyPT09KBQKZGRkAADee+89vPbaa7h06RL69euH4OBgBAQEVBuzt7c3+vTpAy8vL/Tv3x/9+vXD66+/DmNjYxQUFODOnTuYOHEiJk+ezK9TXl4OQ0NDPt7bt2/DwMBAbbvFxcW4c+cO/97T0xMikYh/b21tjatXr9ZwNAlpOShRE9IKDRo0CH/88QciIyPxyiuv8PPz8/OxaNEijBgxoso6urq6/GuxWKy2TCAQQKVSAQAGDhyIxMRE7N+/H+Hh4ejTpw9CQ0OxYsWKKtsUiUQIDw/H6dOncfjwYXz33XeYN28ezp49y/8o+PHHH9GtW7cq61XE26VLF2zcuLHKts3NzWsVLyEtHSVqQloIhUIBGxsbREREoGfPnvz8iIgI+Pn5qZV977330KFDBwwdOhR///03X75z586Ii4tD27ZtGxSLubk5QkJCEBISgh49emDWrFnVJmqAS5qBgYEIDAzE/Pnz4ejoiF27dmHmzJmwsbHB3bt3MXbs2GrX7dy5M7Zs2QILCwsoFIoGxUxIS0WJmpAWZNasWViwYAHatGmDTp06YcOGDYiOjq72jPODDz6AUqnEq6++igMHDqB79+6YP38+Xn31VTg4OOD111+HUCjE5cuXERMTg//973+1imH+/Pno0qULPD09UVJSgn379sHDw6PasmfPnsXRo0fRr18/WFhY4OzZs3jw4AFfftGiRZg2bRoMDQ0xYMAAlJSU4MKFC8jKysLMmTMxduxYLF++HMOGDcPixYthZ2eHxMRE7Ny5E7Nnz4adnV39DyYhLQQlakJakGnTpiEnJwcffvghMjIy0L59e+zduxeurq7Vlp8xYwZUKhUGDRqEgwcPon///ti3bx8WL16ML774AmKxGO7u7pg0aVKtY5BIJJg7dy4SEhIgk8nQo0cPbN68udqyCoUCp06dwqpVq5CbmwtHR0d89dVXGDhwIABg0qRJkMvlWL58OWbNmgU9PT14eXlhxowZAAC5XI5Tp05hzpw5GDFiBPLy8mBra4s+ffrQGTZ5YQgYY0zbQRBCCCGkevTAE0IIIaQZo0RNCCGENGOUqAkhhJBmjBI1IYQQ0oxRoiaEEEKaMUrUhBBCSDNGiboeVq9eDScnJ+jq6qJbt244d+6ctkNSs3TpUvj6+sLAwAAWFhYIDg5WG88Y4J6VHBoaClNTU+jr6+O1115Denq6WpmkpCQMHjwYcrkcFhYWmDVrltpwiABw4sQJdO7cGVKpFG3btkVYWFiVeJryeC1btgwCgYC/DxdofXVNSUnBW2+9BVNTU8hkMnh5eeHChQv8csYY5s+fD2tra8hkMgQFBeHWrVtq28jMzMTYsWOhUChgZGSEiRMnIj8/X63MlStX0KNHD+jq6sLe3h5ffvlllVi2bdsGd3d36OrqwsvLC/v379dYPZVKJT799FM4OztDJpOhTZs2WLJkCZ68o7Ql1/XUqVMYMmQIbGxsIBAIsHv3brXlzalutYmlvnUtKyvDnDlz4OXlBT09PdjY2GD8+PG4f/9+i6xro9DeeCAt0+bNm5lEImG//PILu3btGps8eTIzMjJi6enp2g6N179/f7ZhwwYWExPDoqOj2aBBg5iDgwPLz8/ny7z77rvM3t6eHT16lF24cIG99NJLLCAggF9eXl7OOnTowIKCglhUVBTbv38/MzMzY3PnzuXL3L17l8nlcjZz5kx2/fp19t133zGRSMQOHjzIl2nK43Xu3Dnm5OTEOnbsyKZPn94q65qZmckcHR3ZhAkT2NmzZ9ndu3fZoUOH2O3bt/kyy5YtY4aGhmz37t3s8uXLbOjQoczZ2ZkVFRXxZQYMGMC8vb3ZmTNn2D///MPatm3LxowZwy/PyclhlpaWbOzYsSwmJoZt2rSJyWQytm7dOr5MREQEE4lE7Msvv2TXr19nn3zyCROLxezq1asaqetnn33GTE1N2b59+1h8fDzbtm0b0
9fXZ998802rqOv+/fvZvHnz2M6dOxkAtmvXLrXlzalutYmlvnXNzs5mQUFBbMuWLezGjRssMjKS+fn5sS5duqhto6XUtTFQoq4jPz8/Fhoayr9XKpXMxsaGLV26VItR1SwjI4MBYCdPnmSMcf8xxGIx27ZtG18mNjaWAWCRkZGMMe4/llAoZGlpaXyZNWvWMIVCwY8DPHv2bObp6am2r9GjR7P+/fvz75vqeOXl5TFXV1cWHh7OevbsySfq1lbXOXPmsO7duz9zuUqlYlZWVmz58uX8vOzsbCaVStmmTZsYY4xdv36dAWDnz5/nyxw4cIAJBAKWkpLCGGPshx9+YMbGxnz9K/bt5ubGvx81ahQbPHiw2v67devG/vOf/zSsko8NHjyYvfPOO2rzRowYwcaOHdvq6vp08mpOdatNLA2pa3XOnTvHALDExMQWXVdNoabvOigtLcXFixcRFBTEzxMKhQgKCkJkZKQWI6tZTk4OAMDExAQAcPHiRZSVlanVw93dHQ4ODnw9IiMj4eXlBUtLS75M//79kZubi2vXrvFlntxGRZmKbTTl8QoNDcXgwYOrxNPa6rp371507doVI0eOhIWFBXx8fPDjjz/yy+Pj45GWlqYWh6GhIbp166ZWXyMjI3Tt2pUvExQUBKFQiLNnz/JlXn75ZUgkErX6xsXFISsriy9T0zFpqICAABw9ehQ3b94EwA15+e+///KPH21NdX1ac6pbbWLRtJycHAgEAhgZGbX6utYGJeo6ePjwIZRKpdoXOgBYWloiLS1NS1HVTKVSYcaMGQgMDESHDh0AAGlpaZBIJPx/ggpP1iMtLa3aelYsq6lMbm4uioqKmux4bd68GZcuXcLSpUurLGttdb179y7WrFkDV1dXHDp0CO+99x6mTZuGX3/9VS3emuJIS0uDhYWF2nIdHR2YmJho5Jhoqr4ff/wx3njjDbi7u0MsFsPHxwczZszgR9pqTXV9WnOqW21i0aTi4mLMmTMHY8aM4Z/n3lrrWls0KEcrFxoaipiYGPz777/aDqVRJCcnY/r06QgPD1cbT7m1UqlU6Nq1Kz7//HMAgI+PD2JiYrB27VqEhIRoOTrN2rp1KzZu3Ig///wTnp6eiI6OxowZM2BjY9Pq6ko4ZWVlGDVqFBhjWLNmjbbDaTbojLoOzMzMIBKJqvQYTk9Ph5WVlZaierapU6di3759OH78uNpwgFZWVigtLUV2drZa+SfrYWVlVW09K5bVVEahUEAmkzXJ8bp48SIyMjLQuXNn6OjoQEdHBydPnsS3334LHR0dWFpatpq6AoC1tTXat2+vNs/DwwNJSUlq8dYUh5WVFTIyMtSWl5eXIzMzUyPHRFP1nTVrFn9W7eXlhXHjxuG///0v33LSmur6tOZUt9rEogkVSToxMRHh4eFqo6O1trrWFSXqOpBIJOjSpQuOHj3Kz1OpVDh69Cj8/f21GJk6xhimTp2KXbt24dixY3B2dlZb3qVLF4jFYrV6xMXFISkpia+Hv78/rl69qvafo+I/T0Wi8Pf3V9tGRZmKbTTF8erTpw+uXr2K6OhofuratSvGjh3Lv24tdQWAwMDAKrfa3bx5E46OjgAAZ2dnWFlZqcWRm5uLs2fPqtU3OzsbFy9e5MscO3YMKpUK3bp148ucOnUKZWVlavV1c3ODsbExX6amY9JQhYWFEArVv6JEIhFUKlWrq+vTmlPdahNLQ1Uk6Vu3buHIkSMwNTVVW96a6lovWuvG1kJt3ryZSaVSFhYWxq5fv86mTJnCjIyM1HoMa9t7773HDA0N2YkTJ1hqaio/FRYW8mXeffdd5uDgwI4dO8YuXLjA/P39mb+/P7+84palfv36sejoaHbw4EFmbm5e7S1Ls2bNYrGxsWz16tXV3rLU1MfryV7fra2u586dYzo6Ouyzzz5jt27dYhs3bmRyuZz98ccffJlly5YxIyMjtmfPHnblyhU2bNiwam/r8fHxYWfPnmX//vsvc3V1VbvVJTs7m1laWrJx48axmJgYtnnzZiaXy6vc6qKjo8NWrFjBYmNj2YIFCzR6e1ZISAiztbXlb8/auXMnMzMzY7Nnz24Vdc3Ly2NRUVEsKiqKAWArV65kUVFRfE/n5lS32sRS37qWlpayoUOHMjs7OxYdHa32nfVkD+6WUtfGQIm6Hr777jvm4ODAJBIJ8/PzY2fOnNF2SGoAVDtt2LCBL1NUVMTef/99ZmxszORyORs+fDhLTU1V205CQgIbOHAgk8lkzMzMjH344YesrKxMrczx48dZp06dmEQiYS4uLmr7qNDUx+vpRN3a6vrXX3+xDh06MKlUytzd3dn69evVlqtUKvbpp58yS0tLJpVKWZ8+fVhcXJxamUePHrExY8YwfX19plAo2Ntvv83y8vLUyly+fJl1796dSaVSZmtry5YtW1Yllq1bt7J27doxiUTCPD092d9//62xeubm5rLp06czBwcHpqury1xcXNi8efPUvrxbcl2PHz9e7f/TkJCQZle32sRS37rGx8c/8zvr+PHjLa6ujUHA2BOP+SGEEEJIs0LXqAkhhJBmjBI1IYQQ0oxRoiaEEEKaMUrUhBBCSDNGiZoQQghpxihRE0IIIc0YJep6KikpwcKFC1FSUqLtUBrdi1RX4MWqL9W19XqR6tva60r3UddTbm4uDA0NkZOTo/ZM2tboRaor8GLVl+raer1I9W3tdaUzakIIIaQZo0RNCCGENGMv3HjU5eXliIqKgqWlZZWReeoiLy8PAJCSkoLc3FxNhdcsvUh1BV6s+lJdW68Xqb4tsa4qlQrp6enw8fGBjk7NqfiFu0Z9/vx5+Pn5aTsMQgghBOfOnYOvr2+NZV64M2pLS0sA3MGxtrbWcjSEEEJeRKmpqfDz8+NzUk1euERd0dxtbW0NOzs7LUdDCCHkRVabS7DUmYwQQghpxihRE0IIIc0YJWpCCCGkGXvhrlETQkhNlEolysrKtB0GaeHEYjFEIpFGtkWJugFiUnJwP7sI3vZGsFToajscQkgDMMaQlpaG7OxsbYdCWgkjIyNYWVlBIBA0aDuUqBtg8b7rOBefie/f9MGrHW20HQ4hpAEqkrSFhQXkcnmDv1zJi4sxhsLCQmRkZABAg28FpkTdAD3ZefiJrkCQKgQoURPSYimVSj5Jm5qaajsc0grIZDIAQEZGBiwsLBrUDE6dyRqgR9ExfCTeBr30C9oOhRDSABXXpOVyuZYjIa1JxeepoX0eKFE3gFJqzL0ozNRuIIQQjaDmbqJJmvo8UaJuCLkJAEBQnKXlQAghhLRWlKgbQPg4UYtLs7UbCCGEaJCTkxNWrVpV6/InTpyAQCBo9B7zYWFhMDIyatR9NEdaTdRLly6Fr68vDAwMYGFhgeDgYMTFxdW4TlhYGAQCgdqkq6udW6PEBmYAAF1K1IQQLXj6u/DpaeHChfXa7vnz5zFlypRalw8ICEBqaioMDQ3rtT9SM632+j558iRCQ0Ph6+uL8vJy/N///R/69euH69evQ09P75nrKRQKtYSuretKUkNzAIBcmaOV/RNCXmyp
qan86y1btmD+/Plq3436+vr8a8YYlErlc8c+BgBzc/M6xSGRSGBlZVWndUjtafWM+uDBg5gwYQI8PT3h7e2NsLAwJCUl4eLFizWuJxAIYGVlxU+1GSasMegZWgAADFQtY6ByQkjr8uT3oKGhodp3440bN2BgYIADBw6gS5cukEql+Pfff3Hnzh0MGzYMlpaW0NfXh6+vL44cOaK23aebvgUCAX766ScMHz4ccrkcrq6u2Lt3L7/86abviibqQ4cOwcPDA/r6+hgwYIDaD4vy8nJMmzYNRkZGMDU1xZw5cxASEoLg4OA6HYM1a9agTZs2kEgkcHNzw++//84vY4xh4cKFcHBwgFQqhY2NDaZNm8Yv/+GHH+Dq6gpdXV1YWlri9ddfr9O+m0qzukadk8OdmZqYmNRYLj8/H46OjrC3t8ewYcNw7dq1pgivCn0T7lenIfJRVKrUSgyEkMbBGENhablWJsaYxurx8ccfY9myZYiNjUXHjh2Rn5+PQYMG4ejRo4iKisKAAQMwZMgQJCUl1bidRYsWYdSoUbhy5QoGDRqEsWPHIjPz2Xe8FBYWYsWKFfj9999x6tQpJCUl4aOPPuKXf/HFF9i4cSM2bNiAiIgI5ObmYvfu3XWq265duzB9+nR8+OGHiImJwX/+8x+8/fbbOH78OABgx44d+Prrr7Fu3TrcunULu3fvhpeXFwDgwoULmDZtGhYvXoy4uDgcPHgQL7/8cp3231SazQNPVCoVZsyYgcDAQHTo0OGZ5dzc3PDLL7+gY8eOyMnJwYoVKxAQEIBr165VO750SUkJSkpK+Pd5eXkai7nijFpfUIyU3DzYmhlpbNuEEO0qKlOi/fxDWtn39cX9IZdo5ut58eLF6Nu3L//exMQE3t7e/PslS5Zg165d2Lt3L6ZOnfrM7UyYMAFjxowBAHz++ef49ttvce7cOQwYMKDa8mVlZVi7di3atGkDAJg6dSoWL17ML//uu+8wd+5cDB8+HADw/fffY//+/XWq24oVKzBhwgS8//77AICZM2fizJkzWLFiBXr37o2kpCRYWVkhKCgIYrEYDg4O8PPzAwAkJSVBT08Pr776KgwMDODo6AgfH5867b+pNJsz6tDQUMTExGDz5s01lvP398f48ePRqVMn9OzZEzt37oS5uTnWrVtXbfmlS5fC0NCQn9q3b6+xmAW6hlA+PoR5meka2y4hhGhK165d1d7n5+fjo48+goeHB4yMjKCvr4/Y2NjnnlF37NiRf62npweFQsE/IrM6crmcT9IA9xjNivI5OTlIT0/nkyYAiEQidOnSpU51i42NRWBgoNq8wMBAxMbGAgBGjhyJoqIiuLi4YPLkydi1axfKy8sBAH379oWjoyNcXFwwbtw4bNy4EYWFhXXaf1NpFmfUU6dOxb59+3Dq1Klqz4prIhaL4ePjg9u3b1e7fO7cuZg5cyb/PiUlRXPJWihEnkAfRiwXBdkPALhpZruEEK2TiUW4vri/1vatKU93zP3oo48QHh6OFStWoG3btpDJZHj99ddRWlpa43bEYrHae4FAAJVKVafymmzSrw17e3vExcXhyJEjCA8Px/vvv4/ly5fj5MmTMDAwwKVLl3DixAkcPnwY8+fPx8KFC3H+/PlmdwuYVs+oGWOYOnUqdu3ahWPHjsHZ2bnO21Aqlbh69eozH3oulUqhUCj4ycDAoKFhqykQKQAARTkPNLpdQoh2CQQCyCU6Wpka806WiIgITJgwAcOHD4eXlxesrKyQkJDQaPurjqGhISwtLXH+/Hl+nlKpxKVLl+q0HQ8PD0RERKjNi4iIUDsZk8lkGDJkCL799lucOHECkZGRuHr1KgBAR0cHQUFB+PLLL3HlyhUkJCTg2LFjDahZ49DqGXVoaCj+/PNP7NmzBwYGBkhLSwPA/SNWPNB8/PjxsLW1xdKlSwFw11teeukltG3bFtnZ2Vi+fDkSExMxadIkrdThgdQZuaVC5JVQZzJCSPPn6uqKnTt3YsiQIRAIBPj0009rPDNuLB988AGWLl2Ktm3bwt3dHd999x2ysrLq9CNl1qxZGDVqFHx8fBAUFIS//voLO3fu5Huxh4WFQalUolu3bpDL5fjjjz8gk8ng6OiIffv24e7du3j55ZdhbGyM/fv3Q6VSwc2t+bWMajVRr1mzBgDQq1cvtfkbNmzAhAkTAHAX/IXCyhP/rKwsTJ48GWlpaTA2NkaXLl1w+vRpjV57rovtrkvxx5kkTJO6YqBWIiCEkNpbuXIl3nnnHQQEBMDMzAxz5sxBbm7T32I6Z84cpKWlYfz48RCJRJgyZQr69+9fp1GmgoOD8c0332DFihWYPn06nJ2dsWHDBj6nGBkZYdmyZZg5cyaUSiW8vLzw119/wdTUFEZGRti5cycWLlyI4uJiuLq6YtOmTfD09GykGtefgDX1RQMtu3fvHuzt7ZGcnFzn6+HVWXk4Dt8eu41xLzliSfCze6sTQpqv4uJixMfHw9nZWWtPOnzRqVQqeHh4YNSoUViyZIm2w9GImj5XdclFzaIzWUtmrCcBAGQW1twRgxBCSKXExEQcPnwYPXv2RElJCb7//nvEx8fjzTff1HZozU6zuT2rpeqYeRBHJR9iSMo32g6FEEJaDKFQiLCwMPj6+iIwMBBXr17FkSNH4OHhoe3Qmh06o24gA1E52ghT8aAkRduhEEJIi2Fvb1+lxzapHiXqBlK17Yc3/i1GqdgaO7UdDCGEkFaHEnUDGVg44IyqPSSFQjDGtDaSFyGEkNaJrlE3kImc60xWqlShgAbmIIQQomF0Rt1AMmE5JkiOQF+Zh6y8l6Ev1eyTzwghhLzYKFE3lECIhcJfACEQkzUP9maUqAkhhGgONX03lEiMAoEcAJCfTSNoEUII0SxK1BpQIOQG5iimgTkIIS1Qr169MGPGDP69k5MTVq1aVeM6AoEAu3fvbvC+NbWdmixcuBCdOnVq1H00JkrUGlAkNgQAlOY+1HIkhJAXyZAhQzBgwIBql/3zzz8QCAS4cuVKnbd7/vx5TJkypaHhqXlWskxNTcXAgTRSQk0oUWtAmcQIAFBe8Ei7gRBCXigTJ05EeHg47t27V2XZhg0b0LVrV3Ts2LHO2zU3N4dcLtdEiM9lZWUFqVTaJPtqqShRa0C51AQAwAoztRwJIeRF8uqrr8Lc3BxhYWFq8/Pz87Ft2zZMnDgRjx49wpgxY2Brawu5XA4vLy9s2rSpxu0+3fR969YtvPzyy9DV1UX79u0RHh5eZZ05c+agXbt2kMvlcHFxwaeffoqysjIA3HCTixYtwuXLlyEQCCAQCPiYn276vnr1Kl555RXIZDKYmppiypQpyM/P55dPmDABwcHBWLFiBaytrWFqaorQ0FB+X7WhUqmwePFi2NnZQSqVolOnTjh48CC/vLS0FFOnToW1tTV0dXXh6OjID7XMGMPChQvh4OAAqVQKGxsbTJs2rdb7rg/q9a0BTGYMABAVUaImpNUpLaj7OiIpIHr89aosB5QlgEAIiGXP365Er9a70dHRwfjx4xEWFoZ58+bxD1zatm0blEo
lxowZg/z8fHTp0gVz5syBQqHA33//jXHjxqFNmzbw8/N77j5UKhVGjBgBS0tLnD17Fjk5OWrXsysYGBggLCwMNjY2uHr1KiZPngwDAwPMnj0bo0ePRkxMDA4ePMiPFW1oaFhlGwUFBejfvz/8/f1x/vx5ZGRkYNKkSZg6daraj5Hjx4/D2toax48fx+3btzF69Gh06tQJkydPrtVx++abb/DVV19h3bp18PHxwS+//IKhQ4fi2rVrcHV1xbfffou9e/di69atcHBwQHJyMpKTkwEAO3bswNdff43NmzfD09MTaWlpuHz5cq32W1+UqDVAqMedUeuUZGs3EEKI5n1uU/d1RoYBnsO51zf+ArZNABy7A2//XVlmlRdQWM3lsoU5ddrVO++8g+XLl+PkyZP8OMwbNmzAa6+9BkNDQxgaGuKjjz7iy3/wwQc4dOgQtm7dWqtEfeTIEdy4cQOHDh2CjQ13LD7//PMq15U/+eQT/rWTkxM++ugjbN68GbNnz4ZMJoO+vj50dHRgZWX1zH39+eefKC4uxm+//QY9Pe4Hy/fff48hQ4bgiy++gKWlJQDA2NgY33//PUQiEdzd3TF48GAcPXq01ol6xYoVmDNnDt544w0AwBdffIHjx49j1apVWL16NZKSkuDq6oru3btDIBDA0dGRXzcpKQlWVlYICgqCWCyGg4NDrY5jQ1DTtwaI9c0AANKybO0GQgh54bi7uyMgIAC//PILAOD27dv4559/MHHiRACAUqnEkiVL4OXlBRMTE+jr6+PQoUNISkqq1fZjY2Nhb2/PJ2kA8Pf3r1Juy5YtCAwMhJWVFfT19fHJJ5/Ueh9P7svb25tP0gAQGBgIlUqFuLg4fp6npydEIhH/3traGhkZGbXaR25uLu7fv4/AwEC1+YGBgYiNjQXANa9HR0fDzc0N06ZNw+HDh/lyI0eORFFREVxcXDB58mTs2rUL5eXldapnXdEZtQZIFVyilpfX7ZcwIaQF+L/7dV9H9ETnKPch3DYET50XzbjasLieMHHiRHzwwQdYvXo1NmzYgDZt2qBnz54AgOXLl+Obb77BqlWr4OXlBT09PcyYMQOlpaUa239kZCTGjh2LRYsWoX///jA0NMTmzZvx1VdfaWwfTxKLxWrvBQIBVCqVxrbfuXNnxMfH48CBAzhy5AhGjRqFoKAgbN++Hfb29oiLi8ORI0cQHh6O999/n2/ReDouTaEzag2QGVoAAPRVeVCpmJajIYRolESv7pPoiXMgkQ4378nr0zVttx5GjRoFoVCIP//8E7/99hveeecd/np1REQEhg0bhrfeegve3t5wcXHBzZs3a71tDw8PJCcnIzU1lZ935swZtTKnT5+Go6Mj5s2bh65du8LV1RWJiYnq1ZVIoFTWPB6Ch4cHLl++jIKCyuv3EREREAqFcHNzq3XMNVEoFLCxsakyxGZERATat2+vVm706NH48ccfsWXLFuzYsQOZmVw/JJlMhiFDhuDbb7/FiRMnEBkZiatXNffD62l0Rq0BesZcojYS5CGvuByG8sb5VUUIIdXR19fH6NGjMXfuXOTm5mLChAn8MldXV2zfvh2nT5+GsbExVq5cifT0dLWkVJOgoCC0a9cOISEhWL58OXJzczFv3jy1Mq6urkhKSsLmzZvh6+uLv//+G7t27VIr4+TkhPj4eERHR8POzg4GBgZVbssaO3YsFixYgJCQECxcuBAPHjzABx98gHHjxvHXpzVh1qxZWLBgAdq0aYNOnTphw4YNiI6OxsaNGwEAK1euhLW1NXx8fCAUCrFt2zZYWVnByMgIYWFhUCqV6NatG+RyOf744w/IZDK169iaRmfUGiBVWCCVmSKVmSCzUHPNSYQQUlsTJ05EVlYW+vfvr3Y9+ZNPPkHnzp3Rv39/9OrVC1ZWVggODq71doVCIXbt2oWioiL4+flh0qRJ+Oyzz9TKDB06FP/9738xdepUdOrUCadPn8ann36qVua1117DgAED0Lt3b5ibm1d7i5hcLsehQ4eQmZkJX19fvP766+jTpw++//77uh2M55g2bRpmzpyJDz/8EF5eXjh48CD27t0LV1dXAFwP9i+//BJdu3aFr68vEhISsH//fgiFQhgZGeHHH39EYGAgOnbsiCNHjuCvv/6CqampRmN8koAx9kK11d67dw/29vZITk6GnZ2dxrbb/YtjuJdVhB3vBaCLo7HGtksIaXzFxcWIj4+Hs7MzdHV1tR0OaSVq+lzVJRfRGbWGmOhx41JnFdAZNSGEEM2hRK0hxnIuUVPTNyGEEE2iRK0hU3OW45hkJnTvndZ2KIQQQloRStQaYq58CBdhGlhemrZDIYQQ0opoNVEvXboUvr6+MDAwgIWFBYKDg9WePvMs27Ztg7u7O3R1deHl5YX9+/c3QbQ1O992GkaVfIoosY+2QyGEENKKaDVRnzx5EqGhoThz5gzCw8NRVlaGfv36qd3s/rTTp09jzJgxmDhxIqKiohAcHIzg4GDExMQ0YeRVlVp3xjnmgZTSphkajhCieZp8uhUhmvo8afWBJ08OKwZwQ6FZWFjg4sWLePnll6td55tvvsGAAQMwa9YsAMCSJUsQHh6O77//HmvXrm30mJ/FRE69vglpqSQSCYRCIe7fvw9zc3NIJBL+yV6E1BVjDKWlpXjw4AGEQiEkEkmDttesnkyWk8M9K9vExOSZZSIjIzFz5ky1ef3791cbz1QbrMvvYZzoMIQ5VgACtBoLIaRuhEIhnJ2dkZqaivv36/Fsb0KqIZfL4eDgAKGwYY3XzSZRq1QqzJgxA4GBgejQocMzy6WlpVV5lJylpSXS0qrvxFVSUoKSkhL+fV5enmYCfopV7lUsEYfhdLE3gLmNsg9CSOORSCRwcHBAeXn5c59JTcjziEQi6OjoaKRlptkk6tDQUMTExODff//V6HaXLl2KRYsWaXSb1ZEZmgMA9FW5UKoYREJqNiOkpREIBBCLxY02ChIh9dEsbs+aOnUq9u3bh+PHjz/3UWpWVlZIT09Xm5eenv7Mwcjnzp2LnJwcfrp+/brG4n5SxcAcxshHTlFZo+yDEELIi0eriZoxhqlTp2LXrl04duwYnJ2dn7uOv78/jh49qjYvPDy82oHMAUAqlUKhUPCTgYGBRmJ/mo4+Nya1sSAPmdShjBBCiIZotek7NDQUf/75J/bs2QMDAwP+OrOhoSFkMm7s1vHjx8PW1hZLly4FAEyfPh09e/bEV199hcGDB2Pz5s24cOEC1q9fr7V6AABk3EAc+oJiZOflAxb62o2HEEJIq6DVM+o1a9YgJycHvXr1grW1NT9t2bKFL5OUlKQ2YHlAQAD+/PNPrF+/Ht7e3ti+fTt2795dYwe0JqFrBOXjw5mXlaHdWAghhLQaWj2jrs0ImydOnKgyb+TIkRg5cmQjRNQAQiEKhfowUOWiOOeBtqMhhBDSSjSLzmStRZGOEQCgJO+hdgMhhBDSalCi1qASiSEAoJwSNSGEEA2hRK1BSinXoYwVZmo5EkIIIa0FJWoNYo97fguKKFETQgjRDErUGiSQmwIAdE
qytRsIIYSQVoMStQaJDG2QwkyRXU6PHySEEKIZzeZZ361Bqd976PNPexgIdDBB28EQQghpFeiMWoMqxqTOKy5HmZIGoCeEENJwlKg1SCETo2LQrKxCet43IYSQhqOmbw0S5d7DbulCKFUqZBW8DAsDXW2HRAghpIWjRK1JIjE64iaUAgHO5RcDaJyRugghhLw4KFFrktwUy40+wbl0Id6mpm9CCCEaQNeoNUkkxi2T3jjP3JFZWK7taAghhLQClKg1zESP6/mdVUBn1IQQQhqOmr41rFPpJUhEl4BMIQBXbYdDCCGkhaMzag0LfLAJi8W/wiQzWtuhEEIIaQUoUWsY0zUBAAiKsrQcCSGEkNaAErWGCeRcotYpoURNCCGk4ShRa5iOPjeClqQ0W7uBEEIIaRUoUWuYRGEOAJCV52g5EkIIIa0BJWoNkxlyiVqhykNxmVLL0RBCCGnp6pWok5OTce/ePf79uXPnMGPGDKxfv15jgbVUFYnaSJCH7MIyLUdDCCGkpatXon7zzTdx/PhxAEBaWhr69u2Lc+fOYd68eVi8eLFGA2xpKjqTGQvykUkPPSGEENJA9UrUMTEx8PPzAwBs3boVHTp0wOnTp7Fx40aEhYVpMr6WR/Y4USMfWQUlWg6GEEJIS1evRF1WVgapVAoAOHLkCIYOHQoAcHd3R2pqquaia4ken1FLBWXIyaUOZYQQQhqmXona09MTa9euxT///IPw8HAMGDAAAHD//n2YmppqNMAWR6KPMogBAEU5D7QcDCGEkJauXon6iy++wLp169CrVy+MGTMG3t7eAIC9e/fyTeK1cerUKQwZMgQ2NjYQCATYvXt3jeVPnDgBgUBQZUpLS6tPNRqHQIAiHQUAoDiXEjUhhJCGqdegHL169cLDhw+Rm5sLY2Njfv6UKVMgl8trvZ2CggJ4e3vjnXfewYgRI2q9XlxcHBQKBf/ewsKi1us2hXxda+TmCVBYWKjtUAghhLRw9UrURUVFYIzxSToxMRG7du2Ch4cH+vfvX+vtDBw4EAMHDqzz/i0sLGBkZFTn9ZrKgZf+wJJ91zFEYKPtUAghhLRw9Wr6HjZsGH777TcAQHZ2Nrp164avvvoKwcHBWLNmjUYDrE6nTp1gbW2Nvn37IiIiosayJSUlyM3N5ae8vLxGj89Ej7tGTWNSE0IIaah6JepLly6hR48eAIDt27fD0tISiYmJ+O233/Dtt99qNMAnWVtbY+3atdixYwd27NgBe3t79OrVC5cuXXrmOkuXLoWhoSE/tW/fvtHiq2AslwAA3UdNCCGkwerV9F1YWAgDAwMAwOHDhzFixAgIhUK89NJLSExM1GiAT3Jzc4Obmxv/PiAgAHfu3MHXX3+N33//vdp15s6di5kzZ/LvU1JSGj1Zt0nZi12SNTiX6wugR6PuixBCSOtWrzPqtm3bYvfu3UhOTsahQ4fQr18/AEBGRoZaJ6+m4Ofnh9u3bz9zuVQqhUKh4KeKHxiNyUCVCx/hbViXJYMx1uj7I4QQ0nrVK1HPnz8fH330EZycnODn5wd/f38A3Nm1j4+PRgN8nujoaFhbWzfpPp9H4jkYk0tn4ruyoSiigTkIIYQ0QL2avl9//XV0794dqamp/D3UANCnTx8MHz681tvJz89XOxuOj49HdHQ0TExM4ODggLlz5yIlJYXvuLZq1So4OzvD09MTxcXF+Omnn3Ds2DEcPny4PtVoNDKrdjgp9ENpuQqZBaWQS+p1mAkhhJD6JWoAsLKygpWVFT+Klp2dXZ0edgIAFy5cQO/evfn3FdeSQ0JCEBYWhtTUVCQlJfHLS0tL8eGHHyIlJQVyuRwdO3bEkSNH1LbRHAgEApjIJUjLLUZWQRnsjJ+/DiGEEFKdeiVqlUqF//3vf/jqq6+Qn58PADAwMMCHH36IefPmQSisXYt6r169aryG+/QAH7Nnz8bs2bPrE3LTKi3EcPFp5IoykVlYtx8vhBBCyJPqlajnzZuHn3/+GcuWLUNgYCAA4N9//8XChQtRXFyMzz77TKNBtjhlRZhTsAIQA3vypgMw13ZEhBBCWqh6Jepff/0VP/30Ez9qFgB07NgRtra2eP/99ylRy4ygggBCMBRkPwTgpO2ICCGEtFD16vWdmZkJd3f3KvPd3d2RmZnZ4KBaPKEIxSLuNrDSvAwtB0MIIaQlq1ei9vb2xvfff19l/vfff4+OHTs2OKjWoERsBAAoy3uk3UAIIYS0aPVq+v7yyy8xePBgHDlyhL+HOjIyEsnJydi/f79GA2ypyqRGQHESVIWUqAkhhNRfvc6oe/bsiZs3b2L48OHIzs5GdnY2RowYgWvXrj3zUZ4vGpXu43uyCulSACGEkPqr933UNjY2VTqNXb58GT///DPWr1/f4MBaOoGeKQBApyRbu4EQQghp0ep1Rk2eT/Q4UUtKs7QcCSGEkJaMEnUjkSjMAAC6ZTk0MAchhJB6o0TdSGQK7iEnRshHXkm5lqMhhBDSUtXpGvWIESNqXJ6dnd2QWFoVsT7X9G0kyENWQSkUumItR0QIIaQlqlOiNjQ0fO7y8ePHNyigVkNuAgAwRj4yC0rhaKqn5YAIIYS0RHVK1Bs2bGisOFofuSkKIUMhpMgqLNV2NIQQQlooukbdWCw98R/7PRha+hkyC8q0HQ0hhJAWihJ1IzLRkwAAsgrojJoQQkj9UKJuRMZyLlFnUtM3IYSQeqJE3YhGpizFbsknEGVc1XYohBBCWihK1I3IofQuOgnvIjH+NkrKldoOhxBCSAtEiboRyQcuxiydj3G6yBFHY2lcakIIIXVHiboRidoFwbzrcDyCIbZdSNZ2OIQQQlogStSN7PUudgCAkzcfID23WMvREEIIaWkoUTemR3fgkrof71nGQsWAXVEp2o6IEEJIC0OJujE9ugPsnIxZuZ+js+Amtl1IppG0CCGE1Akl6sbk2hfwGgkhU+JbyWpkPHiAqORsbUdFCCGkBaFE3ZgEAmDwV4CRI+wED/A/8S/YTp3KCCGE1IFWE/WpU6cwZMgQ2NjYQCAQYPfu3c9d58SJE+jcuTOkUinatm2LsLCwRo+zQXQNgdd+BhOIMEx0GriyGcVldE81IYSQ2tFqoi4oKIC3tzdWr15dq/Lx8fEYPHgwevfujejoaMyYMQOTJk3CoUOHGjnSBrL3Bes1FwDwf+xn/Hv2rJYDIoQQ0lLUaZhLTRs4cCAGDhxY6/Jr166Fs7MzvvrqKwCAh4cH/v33X3z99dfo379/Y4WpEcIeM5F8cT/scy/B5eR04KUIQEei7bAIIYQ0cy3qGnVkZCSCgoLU5vXv3x+RkZHPXKekpAS5ubn8lJeX19hhVk8ogui19chmenApu4m8g4u0EwchhJAWpUUl6rS0NFhaWqrNs7S0RG5uLoqKiqpdZ+nSpTA0NOSn9u3bN0Wo1bJxdMXPJjMBAPoXVgN3T2gtFkIIIS1Di0rU9TF37lzk5OTw0/Xr17Uaj2P3N7CxvA8EYGA7/wMUPNJqPIQQQpq3FpWorayskJ6erjYvPT0dCoUCMpms2nWkUikUCgU/G
RgYNEWozzTIyworhSG4pbKFID8N2DtVq/EQQghp3lpUovb398fRo0fV5oWHh8Pf319LEdWdXKKDPh2dMK1sKrLEFkCXt7UdEiGEkGZMq4k6Pz8f0dHRiI6OBsDdfhUdHY2kpCQAXLP1+PHj+fLvvvsu7t69i9mzZ+PGjRv44YcfsHXrVvz3v//VRvj19noXe8QyR/Qu+RqFTq9wM1UqYMdk4NLvQBkN3kEIIYSj1UR94cIF+Pj4wMfHBwAwc+ZM+Pj4YP78+QCA1NRUPmkDgLOzM/7++2+Eh4fD29sbX331FX766admf2vW03ydjOFkKkd2qQD7r6ZxM++dA65uBQ79n3phZXnTB0gIIaTZ0Op91L169apxkIrqnjrWq1cvREVFNWJUjU8gEOD1LnZYcfgmtl9M5obCNHIAen8CqMoAsS5XkDFgbSBgYA04vATY+QJ2XbmnnRFCCHkhaDVRv8hGdLbDV+E3ceZuJpIeFcLB1AboOUu90IM44MENbrp7/PFMAWDhwSVtez/Azg8wbQsIW1R3A0IIIbVEiVpLbIxk6N7WDP/ceogNp+MxtXdbmOhJIBAIKgtZuAOh54H4k0DyOa55PCsByLjOTZd+5crpGgHWHQGrjoC1N+A2EJBqt3c7IYQQzRCwF2yA5Hv37sHe3h7Jycmws7PTaix7olMwfXM0/15fqgMHEzkcTORwNJXDwVQORxM9eFgbwFRfyhXKzwDunQeSzwLJ54H7l4DypzqfzboD6Jlxr2/8za3j0gswcW6SehFCCKlZXXIRnVFr0YAOVni1ozUuJmYhNacY+SXluJ6ai+upuWrlJDpCzBngjrcDnCDUtwDcB3MTACjLgPRrQNoVIPUKkHu/MkkDwPmfgTtHgcErAZOJ3LysBOD2EcDaB7BsD4irvwedEEKI9lGi1iKpjgjfv9kZAFBcpsS9rEIkPuKmpMxCJD4qwN2HBUh8VIgl+67jRFwGVoz0hqVCt3IjIjFg04mbquPUHWAq7pp2hbsngL8/5F4LRNw1b9vOgGMgNxnZN0Z1CSGE1AM1fTdzjDH8cTYJn/19HcVlKhjJxVg2wgsDOljXf6M39gMXfgbuRwOFD6suN3QAnB4nbccAwMQFePLaOSGEkAapSy6iRN1C3M7Ix4wtUYhJ4ZrFR3W1w/whntCXNqBRhDEg9z7Y/UtA8jkIEiO45M2U6uUMrAH/UCDgA+69soy731skAfrMB3QeXz9/EAeUFXGJXVdR/7gIIaSVo2vUrVBbC33sfC8Qq47cxJqTd7D1wj2cuZuJr0d3QhdH43ptMzGzEJvO5WH7RV0Yy4Pw7ZhZ8DARcD3MEyOAxNNAykUgL5W7RaxCWRFwbj33+pVPK+dHfANEb+Re65kDJm24pG3qwr3Wt+QSuFTB3QsuVdBtZYQQ8hyUqFsQiY4Qswe4o5ebBf67JRpJmYUYtS4Sob3a4LUudnAwkavf3lWNMqUKR2PTsfFsEv65Vdns/TC/FMGrI7BoqCdG+74CQds+j1co4nqZC5/4qAh1gJdnAcpS7qy6go4ul6ALHlROyWdqrlTn8cDQ77jXjAFHFnLJ3WskIJHX4egQQkjrRE3fLVRucRkW7LmGXVEp/DwDXR10sDGEl50hPG0U8LI1hJOpHoRCAe5lFWLL+WRsOZ+MjLwSANxl5x6u5hjZxQ47Lt3DibgHAIDgTjb4bLgX9OrbrF6cA2Te5aZHd4HMO9zrwkxuWUlu5S1lvpOBwSseVyoVWOkOCITA/6VWPqHtzFqospORbdAWxk7eEJi7UxInhLRodI26Bq0lUVf46/J9/PRvPGJTc1FarqqyXF+qA0dTOa6n5qLiX9pMX4JRXe0xxs8B9iZcwlOpGNaduosVh+OgVDG4mOth9Zud4WHdSNeay0uA4lxAKALkJty83FTg9HdAcTYQ/ANftGzdKxCnXuTfqyCEysgJOtaegIUnd4uZhSd3n7hQ1DjxEkKIBlGirkFrS9QVypQq3EzPw7WUXFxNyUHM/Rxcv5+LkieSd0AbU4zt5oi+7S0h0an+2vD5hEx88GcU0nKLIdURPm4Kt39uk3pjuZ2Rjy0/fQmbwhtwEyTDTZgMU0Fe9YV1dAF9C0BmwnV+6ziKm5+fAVzbDShsAI9XK8unxQCqcq75XkfK3erGGFCS98SU+3h6/N6+G+Dal1u/KBu4sY97MtyT2y0tAMRy6ilPCHkm6kz2AhKLhPC0MYSnjSFG+XL3QZcrVbjzoAC3M/LhYW0AF3P9527H18kE+6f3wH+3ROPkzQf4eOdVnI3PxP+CO9S/KbyeTt9+iHf/uIjc4m5wMOmNb8f44EhqDo5eiEHhvatwEyTBXZAMd9E9uAlTICkvBrKTuKk4p3JDD28CB2Zxz0R/MqHumKjeSa42XvmkMlHnJAN7QrlOck9ud8tb3FPjTF24fZq0AUzbVP6taEEghJBaoETdiumIhHCzMoCbVd2e+22iJ8GGCb5Ye+oOvjp8E7uiUhCVlIWQACcM9bapfJxpI9p6IRn/t/MqylUMnR2M8OP4rjDVl6KTvRFG+zniXlZv7Im+j3VRKbidkQ8hVLAVPEAX03L8x9cYHq5PPOBFagB4DOUS6pP0zLnmd2Vp5VRRnp8e91KveO8QULm+ji7g2o9b/qTMeKA0D0i9zE1Pk5kA5m6AWTvA3B0wbwdYeQP65po5eISQVoWavkmNzsVnYtomrikcAHSEAvRys8BrnW3xiocFpDqavSasUjGsOByHH07cAQC82tEaK0Z6Q1dc/X4YY4hJycXOqHvYeSkFOUVlAIABnlaYN9iDvwbfpMqKuce0Zt4BHt2p/PvoDpB3v/p1+iwAeszkXj+8DVz4BTBzBbq+XVkmN5W7vU2i1+hVIIQ0LrpGXQNK1HWXU1SGXZfuYWdUCq7cq2xSNpSJMcTbGiM628HH3qjB17GLy5T4cNtl/H0lFQAwtXdbzOzbDkJh7babVVCKr4/cxB9nEqFi3O1s/3nZBe/1agO5pJk0HpUWAo9ucw+HeRj3eCjTOKDfEqBdf65M7D5gy1jusa+TjlSu+3UHrrldR8ad3Uv0AKk+INHnXkv0AMnj+bqGgMwYcH4ZsOrArV9eApTkc8tETXg8lOVA7j0gKxHITqz8W5TN9Q0Qy7jWCR1dwH0Q0OYVbr2CR8D13Vw9Ooyo3F7aVa71Q0eXu1VQIHw8CZ54/dQklnPHCgBUSq6/gUCo/mAexp7dr4Axrj+DspQ7jhV/xfLKlpCSPODS70BpPtBzduW64fOBO8e446Aq4x4YpFJWvga4OspNKyd7X6DLhMptpFzkWmIM7Zv23440GkrUNaBE3TA30/Ow81IKdkel8GfZAOBipoch3jYY4m2NthZ1H2LzYX4JJv92AVFJ2RCLBPh8uBdGdq3fM8dvpOVi8V/XcfrOIwCAlUIXcwe5Y6i3jdY6xdVJ2lXgytbHT4R7v3L+MkeuR3xdDFoB+E3mXif8C4QNBkxdgQ8uVJbZPJa7510s55K8WM7d/iaWc0lULHvitR7319KTu94OADkpQMx2rkzFvgBgyzgg
NZpb/vTT7p6l72IgcDr3OuUi8OMrXHL6b0xlmfW9uVHj6iJgGvdjCOAuTXzbiavLvCdaOP54nRusRih6/ANAxL2uSMqo5qvyyecAFGYCXz4eoe7Th1znRADY9jZwbWfd4vUcDowM416rlMASM+6Z/R/GAQZW3Pwza4C7J7kfCvqW3OfFwBpQWAMGNtzgPHQXRO0wxv37yU0qf6yplJU/ABsBdSYjjaadpQE+HuiOWf3dcPrOQ+y8lIKDMWm4+7AA3xy9hW+O3oK7lQGGeNvg1Y7WcDStvpm2XKlCzP1cRNx+iMg7j3A+IRMl5SoodHWwblxX+LcxrXeM7lYKbJzUDYeupeOz/deRnFmE6Zuj8VtkIhYP84SnjWG9t90krLy46WlzEriztYIHXM/y0gLuDLk0v/J96eP3xTncF4+5e+X6xY9HZZM99SS7+9HcGW9dBC0Euv+Xe52bwp01GjupJ+qce1zHPgAQSQEjB8DYETBy5P7KTbkEWF7MPVinvJjrVV9BYgC4v1q1852+JZe8y4u5L1OmevakUgJ4+kz5ccJ9+guYPS6rKuemmgiEXJ0ETyRCXSOgw2tcvZRllYm6+wzAZyyX/IVibr5Q5/FfMbfPoiyg8FHlZNq2crsleYChHdfCIH9iZLx7F4CbB2qIUcQldQMr7t+cqbjn97/8EbdcWQb83Jc7Rm/vrxzD/toubtsyY0BmxNVLZszdNWHk2PKeYaBScf9nclO40QVz7wN6pty/FcAdh6X2QHkRMOsutwwAjv2Pu11U94knKeoqgHG7m/wHEJ1RkwbLLynH4Wtp2HclFaduPkC5qvIj5WVriFc7WmOQlzUKSssRcfsRIu88xNm7mcgrUf8ybGuhj7VvdUFbi+f3Tq+t4jIlfv43Ht8fu42iMiVkYhF+m+gHX6cXtOe1soxLik82+caf4pqhywq5ZF9WyDXRlxdxZcsKH/994rXvJMDrdW797CTg+OdcAu27qHK7iae5hGbkyC3T5uNin2zWZow7S2as8qE6AHcMyku4hK0qr/wRINR5fPtexW180ubR/Jx0BsiI5ZJQXhr3qN+8VK4vQ0EGF/vTPEcAIzdwr1VKYPHj/wez4yt/EO2ZCkT9/uz96llwP7SMnSp/dFl6ArZduOWMcQ84Egi45RVJrTiHa/4XPfWDpS5nrOUl3GdU17Byuw9vcXd2lBZwP05zU55Iyinc8VCVqW/HqQcwYV/l++Wu3DF7N6LyUtHfHwLnf1JfTywH5qXWPt4aUNN3DShRN67swlIcepy0I24/hKqGT5dCVwf+bUwR0MYMAW1M0dZCv9GaptNyivHhtmhE3H4EfakOfp/oBx+H+j0jnZBmT1nOJZ6KxF2cwyU2I0fA0Z8rwxhw6zB35u38MqDz+HHA1/cC985xZ/lF2ZWtMzn3gJKc6vfnNggYs6ly30sen5XOSahswflrOnAxrOq6T7Y0AOotIs4vA2O3VZb9nxX3A3L6Fe4HAgAc/hQ4/e1zDoiAa1lQ2AAKW8Dau7JlAeCOkdykcoAhgEv8RVlcS9STT1RsP+w5+6odavomWmMkl2C0rwNG+zrgYX4JDsSkYd/l+ziXkAldHRH8nE0Q8Dg5t7dRQFTLjmINZWWoi5/G++KdsPOIvPsI4385h02TX0IH22beDE5IfYh0HiclG8D2GWUEgsoOjE9qP5SbqlOUxXUGzEp43DEwgXtv17WyDFNxzcSMcS0qFVTP6KdQcamhvKjqsopbJitI9B639BRWzjN24jpeVnSiVNg+nh4nZUNbrkWn4odAdRTVDBtc0UGzGXxF0Bk1aRJ5xWWQ6oie+US0plJQUo6QX87hQmIWjORibJ7yEtytGv6Y1OTMQoRfT8fpO4/Q290cb/o5tIyOa4Q0JZXqiZ7vj//yr8ur9twXy7hOcRVK8ri7HprDpYcGojNq0uwY6Nbwa7YJ6Ul1sOFtX7z18zlcTs7G2B/PYst/XqpzT3WViuHyvWwciU3HkesZiEuvfKzpkdh0nLmbiWUjGjCwCSGtkVAICKXqTcx1Ia37HSWtAX2LkBeOga4Yv73thzd/OoNr93Px5o9nseU//nA2q/lBIkWlSkTcfsgl59gMPMwv4ZeJhAL4OhnDzdIAG88m4a/L9xGbmou1b3Wu9Y8AlYrhQEwabqTlYpy/IywMdJ+/Uj08zC9B4qMC6EvFMNDVgYGuDvQkOrW+X50Q0rSo6Zu8sLIKSjHmxzO4kZYHa0NdbP2Pv9qTzBhjiH9YgBNxD3A8LgNn4zPVRijTl+qgp5s5+npYopebOYzkXGecCwmZCP3zEtJzSyCXiLB0hBeGdXrWhUIuQR+6loZVR27xZ+YGUh3M7NcO415yhI5IM5cLsgtLsebEHWw4nVBlpDWBgKuPQpdL3mb6UgR5WGBIEz0ylpAXTYvr9b169WosX74caWlp8Pb2xnfffQc/P79qy4aFheHtt99WmyeVSlFcXFxt+adRoiZPephfgtHrInHnQQHsjGX49R0/JD0qxIm4DByPe4CkzEK18rZGMvTxsECQhyVecjF95jX3h/klmL45ChG3uYeujPd3xLzBHmqPXGWMIfx6Or4+cguxqdw9zga6OrA1kuFGGpew3a0MsHhYB/g51/92sqJSJTacjsfaE3eQW8zdEmepkKK0XIW84nK12+meJhIK0LOdOYb72KJve8tnPsqVEFI3LSpRb9myBePHj8fatWvRrVs3rFq1Ctu2bUNcXBwsLCyqlA8LC8P06dMRFxfHzxMIBLC0tKxStjqUqMnT0nOLMXpdJBIeFVZZJhYJ4Otkgt5uFujlZl6nW8iUKoZVR27iu2O3AQDe9kZY/aYPbI1kOB6Xga/Db+FqCne7i75UB+90d8bE7s7Ql+pgy/lkfHnoBrILufs/R/jY4uNB7nVqDi9XqrDt4j2sOnIT6blcM727lQHmDHBHLzdzCAQCMMZQUq5CbnEZ8orLH09luJmejz3R6o+M1ZfqYEAHK4zwsUU3F9Mm67FPSGvUohJ1t27d4Ovri++//x4AoFKpYG9vjw8++AAff/xxlfJhYWGYMWMGsrOz67U/StSkOvezizB6fSSSM4tgY6iLXu4W6NXOHAFtzaDfwA5hx29kYMaWaOQUlcFILoajiRyXHydAuUSEtwOdMLmHC990XiGroBRfHorD5vNJYKz2zeGMcU3pXx6Kw90HBQC4loAP+7XDsE62dUqwtzPysDvqPnZFpSAlu/L2GSuFLqb1ccUYP+2NVU5IS9ZiEnVpaSnkcjm2b9+O4OBgfn5ISAiys7OxZ8+eKuuEhYVh0qRJsLW1hUqlQufOnfH555/D09OzVvukRE2eJb+kHA/zSuBoKtd48knOLETon5f4M1SZWITxAY6Y0sPludeAo5OzMX9PDL9uO0t9tLM0QJlShXIlQ+njv2VKFcpUDNmFpUh83DpgoifB1N5tMfYlhwaNdKZSMVxMysLOSyn4+8p9vgl9gKcVlr3mVeVHBiGkZi3m9qyHDx9CqVRWaba2tLTEjRs3ql3Hzc0Nv/zyCzp27IicnBysWLECAQE
BuHbtWrWVLSkpQUlJZe/cvLy8KmUIAbim3YaePT+LvYkc2971x+pjt6FkDBMCnGFuULtOWp3sjbDr/UC+Ofxmej5upufXuI5cIsKk7s6Y/LKLRm6NEwq5SwC+TiZYOLQ9fj2dgOWH4nDwWhqu3MvGqjd8GnQdnRDybC3u9ix/f3/4+/vz7wMCAuDh4YF169ZhyZIlVcovXboUixYtqjKfkKYm1RFhZj+3eq0rEgrwZjcHDOxghb+vpqJMqYJYJIRYJIBYJISOSAiJSAAdoRBiHSG8bA1hotc4Z7lSHRGmvNwG/i5m+GDTJSQ8KsQb6yPxwSuu+OCVthrrpU4I4Wg1UZuZmUEkEiE9PV1tfnp6OqysrGq1DbFYDB8fH9y+fbva5XPnzsXMmTP59ykpKWjfvn39gyZEi4z1JHjrJUdthwEA8LIzxL5pPbBgzzXsuHQP3xy9hcg7j/D1G51gayTTdniEtBpaTdQSiQRdunTB0aNH+WvUKpUKR48exdSpU2u1DaVSiatXr2LQoEHVLpdKpZBKK5sYc3NzGxw3IYSjL9XBV6O80cPVDJ/sjsG5hEwMXHUKX7zWEQO9rFGmVOFeVhESHhUg6VEhEh4VIPFRIRIfFSCnqAxyiQ70pDrQk4i4v1IR9B7PM5SJ8XI7c3R2MKIOa+SFpvWm75kzZyIkJARdu3aFn58fVq1ahYKCAv5e6fHjx8PW1hZLly4FACxevBgvvfQS2rZti+zsbCxfvhyJiYmYNGmSNqtByAst2McWPg5GmLYpCpfv5eC9jZdgY6iL9LwSKGsaQg2lNSwDvjl6C7ZGMrzqbY2h3jZob62gpE1eOFpP1KNHj8aDBw8wf/58pKWloVOnTjh48CDfwSwpKQnCJ8axzcrKwuTJk5GWlgZjY2N06dIFp0+fpuZsQrTM0VQP294NwMrwm1h78g7u53APIdIVC+FkqgcHEzmczPTgaCqHo4keTPQkKCorR0GJEgUl5Sgo5f7ml5SjsLQcyZlFOBqbjpTsIqw7eRfrTt6Fi7kehnS0wdBONmhjrrlxy+sj/mEBDl1LQ14x1zIgl4geT9xrmYRrHbA21IWFonEeB9tQecVliErKRkp2EYI8LGvdwZE0La3fR93U6PYsQhrf3Qf5eJhfCidTOcwNpPU+Cy4qVeJ4XAb2Rt/HsbgMtUefelgr4GwmR8U3GGMAA+NfA9zwps97ilxd3M8uwr4r9/HX5VT+YTW14WmjQJCHJfq2t4SnjfZaBdJyinE+IRMXEjJxPiELN9Jy+THjjeVifDbcC4O8qhnykWhci7mPWhsoURPSMuUVlyH8ejr+unwf/9x6WOOjT59mINVBL3cL9G3PPZddUYdb1h7kleBATCr+unwf5xOy+PkioQCBbc3gYqaHwtJyFJYqH0/lKCpVoqBUiaJSJe7nFOHJb1krhS6C2nOPofVvY9qg+9ufR6ViOBKbjgMxaTifkIl7WVXHfHYwkUNHKMDdh9zDcYZ1ssHioR1gKG8eI949T3puMc7GZ+Jc/CPoS8V4v3ebOv37agsl6hpQoiak5csqKMXxuAzkl3APXhEAgECAivPUihPWmJRcHIlNx4O8ymcpiEUCvORiin6eVujmbILiMiWyC8uQU1SG7KIy5BSW8u+TswpxLj6TP+sUCAA/JxMM8bbBIC/rWt0C9yi/BMfjHuDI9XScuvUAhaVKfplcIkLPduYY95Ij/NuYauxMu2Iktu+O3eKfGw8AQgHQ3kaBro7cPfFdnYxhqdBFabkK3x69hR9O3IaKcc+C//J1b/RsZ66ReDTpXlYhzt7NxLn4TJyNf1Tl0b9tLfTx0/iucHrOaHjaRom6BpSoCXmxqFQM0feycfhaOsKvp+HO48eq1oW3vRGGettgsJc1rAzrf725uEyJyLuPcOR6Oo7EpvPPYAeADrYKTHm5DQZ1sKr3vejlShX2XUnF98dv43YG91AcfakOxvjZ4+V25vBxMK7xoT5RSVn4cOtl/uz6zW4OmDfIo8nHVS8qVSIluxDJWUVIySrCvawiJGcWIjo5W+1RtgD346m9tQK+TiY4GJOGtNxiGMnF+GFsZwS0MWvSuOuCEnUNKFET8mK78yAf4dfTcfhaGm6m58NAl7sVzEgu5v7KJNxruRgmcgkC2pjBwVT+/A3XEWMMMSm52HYxGVsvJKO4jLv+bmcsw8TuzhjV1b7WCbJMqcLuqBSsPn6bP8NU6Org7UBnvBPoXKdm7KJSJb44eANhpxMAcE3jX43yhq9T4zx5rrhMiaOxGTh0LQ2JjwqQkl2Eh/nPvhtAJBTAy9YQ3VxM0M3ZBF0cTWAo4+qXkVuMKb9fRHRyNnSEAiwc6lnr5w7kl5Qj8s4juJjrNUlHRUrUNaBETQhpbjILSvF7ZCJ+jUxAZgGXpAxlYox7yREhAU4wN+CGJc0u4prlswpKkVVYhuzCUmTklWDrhWT++rOxXIxJPVwwzt+xQddqT99+iFnbryAluwgCAeDrZAI7YxlsjWSweTzZGunCxkgGuaRuZ9yMMVxIzMLOS/ew70oq8h4/O/5J+lId2BnLYGcsf/xXBjcrA3R2MK7xB0xxmRIf77iC3dH3AXBDzH76anuIn9FKkfCwAL9FJmLbhWTkPb6U0svNHO8EOqOHq1mjdfyjRF0DStSEkOaquEyJ7Rfv4ad/7vJnxmKRAFIdEX89/lnM9CWY3MMFb73kqLGm6tziMiz56zq2XbxXYzkjuRj2xtztd85menB5/NfJTI8/2wW4pLgzKgW7o1LUxnq3MdTFMB9beNsZwc5YBntjORQynXonScYY1py8g+WH4sAYENjWFKvf7MwPHsMYwz+3HuLX0wk4FpdReZeAQhfpecX8+7YW+ng70AkjfOwgk2i20x8l6hpQoiaENHdKFUP49TSsO3UXUUnZ/HyBgDvTNpZzzfNGj1972xthVFd7jSeTCtfv5+JWRh5SsotwP7sI97OLcT+7CCnZRdWeDT/JVE8CZzM9lKsYopMr66InEWGQlzWGd7bFS86mEDbC+Obh19MxY3MUCkqVcDKV49sxPricnI2w0wlqfRV6u5kjJMAJL7uaIzmrEGGnE7Dtwj3+x5GhTIwxfg4Y7+8IGw09HpcSdQ0oURNCWgrGGBIeFYIxBmO5BAqZuE7jiTeFvOIy3M8uRlJmIeIf5iP+YQHuPihA/MMCZDzR2x7gep33cDXHiM626NfeqtF+WDzpRlouJv16ocqtafpSHbzexQ7j/R3hUs016bziMmy7cA9hpxP4s3+RUICBHazwyeD2DepUCFCirhElakIIaRr5JeVIeMgl7YKScrzibqGVp7Q9yi/Be39cwrmETDib6SHE3xGvdbGr1RCwShXD0dh0bIhIQOTdRzCQ6iDy//o0eEjcFjMeNSGEkNZLX6qDDraG6GBrqNU4TPWl2DTlJdx9kI825vp1amYXCQXo52mFfp5WuH4/F3ce5DfauPXPQomaEEJIqycSCuBqadCgbbS3UaC9jUJDEdUejfBOCCGENGOUqAkhhJBmjBI1IYQQ0oxRoiaEEEKaMUrUhB
BCSDP2wvX6Vqm4B9+npqZqORJCCCEvqoocVJGTavLCJer09HQAgJ+fn5YjIYQQ8qJLT0+Hg4NDjWVeuCeTlZeXIyoqCpaWlhAKG9byn5eXh/bt2+P69eswMGjY/Xna0JLjb8mxAxS/NrXk2IGWHX9Ljh3QbPwqlQrp6enw8fGBjk7N58wvXKLWpNzcXBgaGiInJwcKRdPfBN9QLTn+lhw7QPFrU0uOHWjZ8bfk2AHtxU+dyQghhJBmjBI1IYQQ0oxRom4AqVSKBQsWQCqVajuUemnJ8bfk2AGKX5tacuxAy46/JccOaC9+ukZNCCGENGN0Rk0IIYQ0Y5SoCSGEkGaMEjUhhBDSjFGifsrq1avh5OQEXV1ddOvWDefOnaux/LZt2+Du7g5dXV14eXlh//79assZY5g/fz6sra0hk8kQFBSEW7duaT32H3/8ET169ICxsTGMjY0RFBRUpfyECRMgEAjUpgEDBjRK7HWNPywsrEpsurq6amWa8tjXNf5evXpViV8gEGDw4MF8maY6/qdOncKQIUNgY2MDgUCA3bt3P3edEydOoHPnzpBKpWjbti3CwsKqlKnr/6WmiH3nzp3o27cvzM3NoVAo4O/vj0OHDqmVWbhwYZXj7u7urvHY6xP/iRMnqv3cpKWlqZVrimNfn/ir+0wLBAJ4enryZZrq+C9duhS+vr4wMDCAhYUFgoODERcX99z1tPGdT4n6CVu2bMHMmTOxYMECXLp0Cd7e3ujfvz8yMjKqLX/69GmMGTMGEydORFRUFIKDgxEcHIyYmBi+zJdffolvv/0Wa9euxdmzZ6Gnp4f+/fujuLhYq7GfOHECY8aMwfHjxxEZGQl7e3v069cPKSkpauUGDBiA1NRUftq0aZNG465v/ACgUCjUYktMTFRb3lTHvj7x79y5Uy32mJgYiEQijBw5Uq1cUxz/goICeHt7Y/Xq1bUqHx8fj8GDB6N3796Ijo7GjBkzMGnSJLWEV59/z6aI/dSpU+jbty/279+Pixcvonfv3hgyZAiioqLUynl6eqod93///VejcVeoa/wV4uLi1OKzsLDglzXVsQfqHv8333yjFndycjJMTEyqfO6b4vifPHkSoaGhOHPmDMLDw1FWVoZ+/fqhoKDgmeto7TufEZ6fnx8LDQ3l3yuVSmZjY8OWLl1abflRo0axwYMHq83r1q0b+89//sMYY0ylUjErKyu2fPlyfnl2djaTSqVs06ZNWo39aeXl5czAwID9+uuv/LyQkBA2bNgwjcb5LHWNf8OGDczQ0PCZ22vKY89Yw4//119/zQwMDFh+fj4/rymPfwUAbNeuXTWWmT17NvP09FSbN3r0aNa/f3/+fUOPR33UJvbqtG/fni1atIh/v2DBAubt7a25wGqpNvEfP36cAWBZWVnPLKONY89Y/Y7/rl27mEAgYAkJCfw8bR3/jIwMBoCdPHnymWW09Z1PZ9SPlZaW4uLFiwgKCuLnCYVCBAUFITIystp1IiMj1coDQP/+/fny8fHxSEtLUytjaGiIbt26PXObTRX70woLC1FWVgYTExO1+SdOnICFhQXc3Nzw3nvv4dGjRxqLu0J948/Pz4ejoyPs7e0xbNgwXLt2jV/WVMe+IfE/6eeff8Ybb7wBPT09tflNcfzr6nmfe00cj6aiUqmQl5dX5XN/69Yt2NjYwMXFBWPHjkVSUpKWIqxep06dYG1tjb59+yIiIoKf35KOPcB97oOCguDo6Kg2XxvHPycnBwCqfBaepK3vfErUjz18+BBKpRKWlpZq8y0tLatc/6mQlpZWY/mKv3XZZn3UJ/anzZkzBzY2NmofsAEDBuC3337D0aNH8cUXX+DkyZMYOHAglEqlxmKvb/xubm745ZdfsGfPHvzxxx9QqVQICAjAvXv3ADTdsa9v/E86d+4cYmJiMGnSJLX5TXX86+pZn/vc3FwUFRVp5PPYVFasWIH8/HyMGjWKn9etWzeEhYXh4MGDWLNmDeLj49GjRw/k5eVpMVKOtbU11q5dix07dmDHjh2wt7dHr169cOnSJQCa+S5oKvfv38eBAweqfO61cfxVKhVmzJiBwMBAdOjQ4ZnltPWd/8INc0mqWrZsGTZv3owTJ06odch64403+NdeXl7o2LEj2rRpgxMnTqBPnz7aCJXn7+8Pf39//n1AQAA8PDywbt06LFmyRIuR1d3PP/8MLy+vKkOvNufj3xr8+eefWLRoEfbs2aN2jXfgwIH8644dO6Jbt25wdHTE1q1bMXHiRG2EynNzc4Obmxv/PiAgAHfu3MHXX3+N33//XYuR1d2vv/4KIyMjBAcHq83XxvEPDQ1FTExMo/VFaCg6o37MzMwMIpGIH6+6Qnp6OqysrKpdx8rKqsbyFX/rss36qE/sFVasWIFly5bh8OHD6NixY41lXVxcYGZmhtu3bzc45ic1JP4KYrEYPj4+fGxNdeyBhsVfUFCAzZs31+oLqLGOf10963OvUCggk8k08u/Z2DZv3oxJkyZh69atVZoyn2ZkZIR27dpp/bg/i5+fHx9bSzj2ANcz+pdffsG4ceMgkUhqLNvYx3/q1KnYt28fjh8/Djs7uxrLaus7nxL1YxKJBF26dMHRo0f5eSqVCkePHlU7c3uSv7+/WnkACA8P58s7OzvDyspKrUxubi7Onj37zG02VewA1ztxyZIlOHjwILp27frc/dy7dw+PHj2CtbW1RuKuUN/4n6RUKnH16lU+tqY69g2Nf9u2bSgpKcFbb7313P001vGvq+d97jXx79mYNm3ahLfffhubNm1Sux3uWfLz83Hnzh2tH/dniY6O5mNr7se+wsmTJ3H79u1a/UBtrOPPGMPUqVOxa9cuHDt2DM7Ozs9dR2vf+fXuhtYKbd68mUmlUhYWFsauX7/OpkyZwoyMjFhaWhpjjLFx48axjz/+mC8fERHBdHR02IoVK1hsbCxbsGABE4vF7OrVq3yZZcuWMSMjI7Znzx525coVNmzYMObs7MyKioq0GvuyZcuYRCJh27dvZ6mpqfyUl5fHGGMsLy+PffTRRywyMpLFx8ezI0eOsM6dOzNXV1dWXFys0djrE/+iRYvYoUOH2J07d9jFixfZG2+8wXR1ddm1a9fU6tgUx74+8Vfo3r07Gz16dJX5TXn88/LyWFRUFIuKimIA2MqVK1lUVBRLTExkjDH28ccfs3HjxvHl7969y+RyOZs1axaLjY1lq1evZiKRiB08eLDWx0NbsW/cuJHp6Oiw1atXq33us7Oz+TIffvghO3HiBIuPj2cREREsKCiImZmZsYyMDI3GXp/4v/76a7Z7925269YtdvXqVTZ9+nQmFArZkSNH+DJNdezrE3+Ft956i3Xr1q3abTbV8X/vvfeYoaEhO3HihNpnobCwkC/TXL7zKVE/5bvvvmMODg5MIpEwPz8/dubMGX5Zz549WUhIiFr5rVu3snbt2jGJRMI8PT3Z33//rbZcpVKxTz/9lFlaWjKpVMr69OnD4uLitB67o6MjA1BlWrBgAWOMscLCQtavXz9mbm7OxGIxc3R0ZJMnT26U/+z1i
X/GjBl8WUtLSzZo0CB26dIlte015bGva/yMMXbjxg0GgB0+fLjKtpry+Ffc8vP0VBFvSEgI69mzZ5V1OnXqxCQSCXNxcWEbNmyost2ajoe2Yu/Zs2eN5RnjbjWztrZmEomE2drastGjR7Pbt29rPPb6xP/FF1+wNm3aMF1dXWZiYsJ69erFjh07VmW7TXHs6xM/Y9ztSjKZjK1fv77abTbV8a8ubgBqn+Xm8p1Po2cRQgghzRhdoyaEEEKaMUrUhBBCSDNGiZoQQghpxihRE0IIIc0YJWpCCCGkGaNETQghhDRjlKgJIYSQZowSNSGEENKMUaImhDQagUCA3bt3azsMQlo0StSEtFITJkyAQCCoMg0YMEDboRFC6oDGoyakFRswYAA2bNigNk8qlWopGkJIfdAZNSGtmFQqhZWVldpkbGwMgGuWXrNmDQYOHAiZTAYXFxds375dbf2rV6/ilVdegUwmg6mpKaZMmYL8/Hy1Mr/88gs8PT0hlUphbW2NqVOnqi1/+PAhhg8fDrlcDldXV+zdu5dflpWVhbFjx8Lc3BwymQyurq5VflgQ8qKjRE3IC+zTTz/Fa6+9hsuXL2Ps2LF44403EBsbCwAoKChA//79YWxsjPPnz2Pbtm04cuSIWiJes2YNQkNDMWXKFFy9ehV79+5F27Zt1faxaNEijBo1CleuXMGgQYMwduxYZGZm8vu/fv06Dhw4gNjYWKxZswZmZmZNdwAIaQkaNPYWIaTZCgkJYSKRiOnp6alNn332GWOMG+bv3XffVVunW7du7L333mOMMbZ+/XpmbGzM8vPz+eV///03EwqF/HCbNjY2bN68ec+MAQD75JNP+Pf5+fkMADtw4ABjjLEhQ4awt99+WzMVJqSVomvUhLRivXv3xpo1a9TmmZiY8K/9/f3Vlvn7+yM6OhoAEBsbC29vb+jp6fHLAwMDoVKpEBcXB4FAgPv376NPnz41xtCxY0f+tZ6eHhQKBTIyMgAA7733Hl577TVcunQJ/fr1Q3BwMAICAupVV0JaK0rUhLRienp6VZqiNUUmk9WqnFgsVnsvEAigUqkAAAMHDkRiYiL279+P8PBw9OnTB6GhoVixYoXG4yWkpaJr1IS8wM6cOVPlvYeHBwDAw8MDly9fRkFBAb88IiICQqEQbm5uMDAwgJOTE44ePdqgGMzNzRESEoI//vgDq1atwvr16xu0PUJaGzqjJqQVKykpQVpamto8HR0dvsPWtm3b0LVrV3Tv3h0bN27EuXPn8PPPPwMAxo4diwULFiAkJAQLFy7EgwcP8MEHH2DcuHGwtLQEACxcuBDvvvsuLCwsMHDgQOTl5SEiIgIffPBBreKbP38+unTpAk9PT5SUlGDfvn38DwVCCIcSNSGt2MGDB2Ftba02z83NDTdu3ADA9cjevHkz3n//fVhbW2PTpk1o3749AEAul+PQoUOYPn06fH19IZfL8dprr2HlypX8tkJCQlBcXIyvv/4aH330EczMzPD666/XOj6JRIK5c+ciISEBMpkMPXr0wObNmzVQc0JaDwFjjGk7CEJI0xMIBNi1axeCg4O1HQohpAZ0jZoQQghpxihRE0IIIc0YXaMm5AVFV70IaRnojJoQQghpxihRE0IIIc0YJWpCCCGkGaNETQghhDRjlKgJIYSQZowSNSGEENKMUaImhBBCmjFK1IQQQkgzRomaEEIIacb+HyAMOobNHZirAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] @@ -1374,12 +1904,26 @@ } ], "source": [ + "import matplotlib.pyplot as plt\n", "from previous_chapters import plot_losses\n", "\n", + "\n", + "plt.figure(figsize=(12, 6))\n", "epochs_tensor = torch.linspace(0, num_epochs, len(train_losses))\n", "plot_losses(epochs_tensor, tokens_seen, train_losses, val_losses)" ] }, + { + "cell_type": "markdown", + "id": "6777e0c4-d82c-46d8-84fb-1376c4f8bae0", + "metadata": { + "id": "6777e0c4-d82c-46d8-84fb-1376c4f8bae0" + }, + "source": [ + "- As we can see, the loss decreases sharply at the beginning of the first epoch, which means the model starts learning quickly\n", + "- We can see that slight overfitting sets in at around 1 training epoch" + ] + }, { "cell_type": "markdown", "id": "87b79a47-13f9-4d1f-87b1-3339bafaf2a3", @@ -1387,32 +1931,41 @@ "id": "87b79a47-13f9-4d1f-87b1-3339bafaf2a3" }, "source": [ - "## 7.6 Extracting and saving responses" + "## 7.7 Extracting and saving responses" ] }, { - "cell_type": "code", - "execution_count": 33, - "id": "F9QyvnRipwNc", + "cell_type": "markdown", + "id": "5a25cc88-1758-4dd0-b8bf-c044cbf2dd49", "metadata": { - "id": "F9QyvnRipwNc" + "id": "5a25cc88-1758-4dd0-b8bf-c044cbf2dd49" }, - "outputs": [], "source": [ - "def extract_response(response):\n", - " return response[response.find(\"\\n### Response\")+len(\"\\n### Response:\")+1:]" + "" + ] + }, + { + "cell_type": "markdown", + "id": "17510e9d-7727-4d58-ba9a-d82ec23c1427", + "metadata": { + "id": "17510e9d-7727-4d58-ba9a-d82ec23c1427" + }, + "source": [ + "- In this section, we save the test set responses for scoring in the next section\n", + "- We also save a copy of the model for future use\n", + "- But first, let's take a brief look at the responses generated by the finetuned model" ] }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 36, "id": "VQ2NZMbfucAc", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "VQ2NZMbfucAc", - "outputId": "1fd28d43-3fd4-4d94-a63e-07f4a53f41b6" + "outputId": "6f376ffe-c059-4c15-905b-bf408f2a86f8" }, "outputs": [ { @@ -1442,7 +1995,7 @@ ">> The type of cloud typically associated with thunderstorms is cumulonimbus.\n", "\n", "Model response:\n", - ">> The type of cloud typically associated with thunderstorms is a cumulus (thin, water-filled, or gas-filled).\n", + ">> The type of cloud associated with thunderstorms is a cumulus cloud.\n", "-------------------------------------\n", "Below is an instruction that describes a task. 
Write a response that appropriately completes the request.\n", "\n", @@ -1461,6 +2014,7 @@ "source": [ "torch.manual_seed(123)\n", "\n", + "\n", "for entry in test_data[:3]:\n", "\n", " input_text = format_input(entry)\n", @@ -1472,8 +2026,8 @@ " context_size=BASE_CONFIG[\"context_length\"],\n", " eos_id=50256\n", " )\n", - " response = token_ids_to_text(token_ids, tokenizer)\n", - " response_text = extract_response(response)\n", + " generated_text = token_ids_to_text(token_ids, tokenizer)\n", + " response_text = generated_text[len(input_text):].replace(\"### Response:\", \"\").strip()\n", "\n", " print(input_text)\n", " print(f\"\\nCorrect response:\\n>> {entry['output']}\")\n", @@ -1481,23 +2035,43 @@ " print(\"-------------------------------------\")" ] }, + { + "cell_type": "markdown", + "id": "49ab64c1-586f-4939-8def-23feeb1b3599", + "metadata": { + "id": "49ab64c1-586f-4939-8def-23feeb1b3599" + }, + "source": [ + "- As we can see based on the test set instructions, given responses, and the model's responses, the model performs relatively well\n", + "- The answers to the first and last instructions are clearly correct\n", + "- The second answer is close; the model answers with \"cumulus cloud\" instead of \"cumulonimbus\" (however, note that cumulus clouds can develop into cumulonimbus clouds, which are capable of producing thunderstorms)\n", + "- Most importantly, we can see that model evaluation is not as straightforward as in the previous chapter, where we just had to calculate the percentage of correct spam/non-spam class labels to obtain the classification accuracy\n", + "- In practice, instruction-finetuned LLMs such as chatbots are evaluated via multiple approaches\n", + " - short-answer and multiple choice benchmarks such as MMLU (\"Measuring Massive Multitask Language Understanding\", [https://arxiv.org/abs/2009.03300](https://arxiv.org/abs/2009.03300)), which test the knowledge of a model\n", + " - human preference comparison to other LLMs, such as LMSYS chatbot arena ([https://arena.lmsys.org](https://arena.lmsys.org))\n", + " - automated conversational benchmarks, where another LLM like GPT-4 is used to evaluate the responses, such as AlpacaEval ([https://tatsu-lab.github.io/alpaca_eval/](https://tatsu-lab.github.io/alpaca_eval/))\n", + "\n", + "- In the next section, we will use an approach similar to AlpaceEval and use another LLM to evaluate the responses of our model; however, we will use our own test set instead of using a publicly available benchmark dataset\n", + "- For this, we add the model response to the `test_set` dictionary and save it as a `\"instruction-data-with-response.json\"` file for record-keeping so that we can load and analyze it in separate Python sessions if needed" + ] + }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 37, "id": "-PNGKzY4snKP", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "-PNGKzY4snKP", - "outputId": "3e16caff-287a-4084-ed93-fcccd68e1da7" + "outputId": "4b631b9c-73bf-4cdd-dc78-ddcd3a03f934" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "100%|██████████| 110/110 [01:17<00:00, 1.42it/s]\n" + "100%|██████████| 110/110 [01:05<00:00, 1.68it/s]\n" ] } ], @@ -1515,8 +2089,8 @@ " context_size=BASE_CONFIG[\"context_length\"],\n", " eos_id=50256\n", " )\n", - " response = token_ids_to_text(token_ids, tokenizer)\n", - " response_text = extract_response(response)\n", + " generated_text = token_ids_to_text(token_ids, tokenizer)\n", + " response_text = 
generated_text[len(input_text):].replace(\"### Response:\", \"\").strip()\n", "\n", " test_data[i][\"model_response\"] = response_text\n", "\n", @@ -1525,46 +2099,60 @@ " json.dump(test_data, file, indent=4) # \"indent\" for pretty-printing" ] }, + { + "cell_type": "markdown", + "id": "228d6fa7-d162-44c3-bef1-4013c027b155", + "metadata": { + "id": "228d6fa7-d162-44c3-bef1-4013c027b155" + }, + "source": [ + "- Let's double-check one of the entries to see whether the responses have been added to the `test_set` dictionary correctly" + ] + }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 38, "id": "u-AvCCMTnPSE", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "u-AvCCMTnPSE", - "outputId": "90c7f165-713e-4795-9205-f2f9b4d13313" + "outputId": "bad28133-b088-4cdf-8056-68159268a48e" }, "outputs": [ { - "data": { - "text/plain": [ - "{'instruction': 'Rewrite the sentence using a simile.',\n", - " 'input': 'The car is very fast.',\n", - " 'output': 'The car is as fast as lightning.',\n", - " 'model_response': 'The car is as fast as a bullet.'}" - ] - }, - "execution_count": 36, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "{'instruction': 'Rewrite the sentence using a simile.', 'input': 'The car is very fast.', 'output': 'The car is as fast as lightning.', 'model_response': 'The car is as fast as a bullet.'}\n" + ] } ], "source": [ - "test_data[0]" + "print(test_data[0])" + ] + }, + { + "cell_type": "markdown", + "id": "c1b2f3f6-8569-405a-9db6-d47cba65608a", + "metadata": { + "id": "c1b2f3f6-8569-405a-9db6-d47cba65608a" + }, + "source": [ + "- Finally, we also save the model in case we want to reuse it in the future" ] }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 39, "id": "8cBU0iHmVfOI", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "8cBU0iHmVfOI", - "outputId": "df6e862f-a6c8-4d23-ac3a-7645fd25a59d" + "outputId": "860a2d06-2d0e-4ae8-943d-dd12d299eed9" }, "outputs": [ { @@ -1578,9 +2166,13 @@ "source": [ "import re\n", "\n", + "model.cpu()\n", "file_name = f\"{re.sub(r'[ ()]', '', CHOOSE_MODEL) }-sft.pth\"\n", "torch.save(model.state_dict(), file_name)\n", - "print(f\"Model saved as {file_name}\")" + "print(f\"Model saved as {file_name}\")\n", + "\n", + "# Load model via\n", + "# model.load_state_dict(torch.load(\"gpt2-medium355M-sft.pth\"))" ] }, { @@ -1590,7 +2182,121 @@ "id": "obgoGI89dgPm" }, "source": [ - "## 7.7 Evaluating the finetuned LLM" + "## 7.8 Evaluating the finetuned LLM" + ] + }, + { + "cell_type": "markdown", + "id": "805b9d30-7336-499f-abb5-4a21be3129f5", + "metadata": { + "id": "805b9d30-7336-499f-abb5-4a21be3129f5" + }, + "source": [ + "" + ] + }, + { + "cell_type": "markdown", + "id": "68d2b9d3-b6ff-4533-a89d-7b66079b4fd1", + "metadata": { + "id": "68d2b9d3-b6ff-4533-a89d-7b66079b4fd1" + }, + "source": [ + "- In this section, we automate the response evaluation of the finetuned LLM using another, larger LLM\n", + "- In particular, we use an instruction-finetuned 8 billion parameter Llama 3 model by Meta AI that can be run locally via ollama ([https://ollama.com](https://ollama.com))\n", + "- (Alternatively, if you prefer using a more capable LLM like OpenAI's GPT-4 via the ChatGPT API, please see the [../03_model-evaluation/llm-instruction-eval-ollama.ipynb](../03_model-evaluation/llm-instruction-eval-ollama.ipynb) notebook)" + ] + }, + { + "cell_type": "markdown", + "id": "ea427a30-36ba-44e3-bb1f-eb0d7008d6e9", 
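- The response-extraction pattern used in the two cells above is restated below as a self-contained sketch: the generated text repeats the prompt, so the prompt prefix is sliced off and the Alpaca-style `### Response:` header is stripped; the example strings and the `-demo` file name are fabricated for illustration, and only the pattern itself mirrors the notebook

```python
# Self-contained sketch of the extraction-and-save pattern above; the prompt/answer
# strings are made up, and the demo file name is hypothetical so we don't overwrite
# the notebook's real "instruction-data-with-response.json".
import json


def extract_response_text(generated_text, input_text):
    # Drop the echoed prompt and the "### Response:" header, keep only the answer
    return generated_text[len(input_text):].replace("### Response:", "").strip()


prompt = (
    "Below is an instruction that describes a task. "
    "Write a response that appropriately completes the request."
    "\n\n### Instruction:\nName the author of 'Pride and Prejudice'."
)
generated = prompt + "\n\n### Response:\nThe author of 'Pride and Prejudice' is Jane Austen."

entry = {
    "instruction": "Name the author of 'Pride and Prejudice'.",
    "input": "",
    "output": "Jane Austen.",
    "model_response": extract_response_text(generated, prompt),
}

with open("instruction-data-with-response-demo.json", "w") as file:
    json.dump([entry], file, indent=4)  # "indent" for pretty-printing
```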
+ "metadata": { + "id": "ea427a30-36ba-44e3-bb1f-eb0d7008d6e9" + }, + "source": [ + "- Ollama is an application to run LLMs efficiently\n", + "- It is a wrapper around llama.cpp ([https://github.com/ggerganov/llama.cpp](https://github.com/ggerganov/llama.cpp)), which implements LLMs in pure C/C++ to maximize efficiency\n", + "- Note that it is a tool for using LLMs to generate text (inference), not training or finetuning LLMs\n", + "- Before running the code below, install ollama by visiting [https://ollama.com](https://ollama.com) and following the instructions (for instance, clicking on the \"Download\" button and downloading the ollama application for your operating system)" + ] + }, + { + "cell_type": "markdown", + "id": "747a2fc7-282d-47ec-a987-ed0a23ed6822", + "metadata": { + "id": "747a2fc7-282d-47ec-a987-ed0a23ed6822" + }, + "source": [ + "- For macOS and Windows users, click on the ollama application you downloaded; if it prompts you to install the command line usage, say \"yes\"\n", + "- Linux users can use the installation command provided on the ollama website\n", + "\n", + "- In general, before we can use ollama from the command line, we have to either start the ollama application or run `ollama serve` in a separate terminal\n", + "\n", + "\n", + "\n", + "\n", + "- With the ollama application or `ollama serve` running in a different terminal, on the command line, execute the following command to try out the 8 billion parameters Llama 3 model (the model, which takes up 4.7 GB of storage space, will be automatically downloaded the first time you execute this command)\n", + "\n", + "```bash\n", + "# 8B model\n", + "ollama run llama3\n", + "```\n", + "\n", + "\n", + "The output looks like as follows:\n", + "\n", + "```\n", + "$ ollama run llama3\n", + "pulling manifest\n", + "pulling 6a0746a1ec1a... 100% ▕████████████████▏ 4.7 GB\n", + "pulling 4fa551d4f938... 100% ▕████████████████▏  12 KB\n", + "pulling 8ab4849b038c... 100% ▕████████████████▏  254 B\n", + "pulling 577073ffcc6c... 100% ▕████████████████▏  110 B\n", + "pulling 3f8eb4da87fa... 100% ▕████████████████▏  485 B\n", + "verifying sha256 digest\n", + "writing manifest\n", + "removing any unused layers\n", + "success\n", + "```\n", + "\n", + "- Note that `llama3` refers to the instruction finetuned 8 billion Llama 3 model\n", + "\n", + "- Using ollama with the `\"llama3\"` model (a 8B parameter model) requires 16 GB of RAM; if this is not supported by your machine, you can try the smaller model, such as the 3.8B parameter phi-3 model by setting `model = \"phi-3\"`, which only requires 8 Gb of RAM\n", + "\n", + "- Alternatively, you can also use the larger 70 billion parameters Llama 3 model, if your machine supports it, by replacing `llama3` with `llama3:70b`\n", + "\n", + "- After the download has been completed, you will see a command line prompt that allows you to chat with the model\n", + "\n", + "- Try a prompt like \"What do llamas eat?\", which should return an output similar to the following:\n", + "\n", + "```\n", + ">>> What do llamas eat?\n", + "Llamas are ruminant animals, which means they have a four-chambered\n", + "stomach and eat plants that are high in fiber. In the wild, llamas\n", + "typically feed on:\n", + "1. 
Grasses: They love to graze on various types of grasses, including tall\n", + "grasses, wheat, oats, and barley.\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "7b7b341c-ba0e-40bb-a52c-cb328bbd1fe4", + "metadata": { + "id": "7b7b341c-ba0e-40bb-a52c-cb328bbd1fe4" + }, + "source": [ + "- You can end this session using the input `/bye`" + ] + }, + { + "cell_type": "markdown", + "id": "faaf3e02-8ca0-4edf-be23-60625a5b14e3", + "metadata": { + "id": "faaf3e02-8ca0-4edf-be23-60625a5b14e3" + }, + "source": [ + "- The following code checks whether the ollama session is running correctly before proceeding to use ollama to evaluate the test set responses we generated in the previous section" ] }, { @@ -1598,8 +2304,12 @@ "execution_count": 1, "id": "026e8570-071e-48a2-aa38-64d7be35f288", "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 193 + }, "id": "026e8570-071e-48a2-aa38-64d7be35f288", - "outputId": "ad2e3f89-30a0-4f8b-9d6f-24acf6cf5153" + "outputId": "c0f1d14f-d545-4605-a1ee-0f1eacd98cf6" }, "outputs": [ { @@ -1632,12 +2342,14 @@ "cell_type": "code", "execution_count": 2, "id": "723c9b00-e3cd-4092-83c3-6e48b5cf65b0", - "metadata": {}, + "metadata": { + "id": "723c9b00-e3cd-4092-83c3-6e48b5cf65b0" + }, "outputs": [], "source": [ - "# This cell is optional; it allows you to restart the notebook \n", - "# and only run section 7.7 without rerunning any of the previous cod\n", - "import json \n", + "# This cell is optional; it allows you to restart the notebook\n", + "# and only run section 7.7 without rerunning any of the previous code\n", + "import json\n", "from tqdm import tqdm\n", "\n", "file_path = \"instruction-data-with-response.json\"\n", @@ -1658,29 +2370,49 @@ " return instruction_text + input_text" ] }, + { + "cell_type": "markdown", + "id": "b3464705-d026-4594-977f-fb357e51c3a9", + "metadata": { + "id": "b3464705-d026-4594-977f-fb357e51c3a9" + }, + "source": [ + "- Now, an alternative way to the `ollama run` command we used earlier to interact with the model is via its REST API in Python via the following function\n", + "- Before you run the next cells in this notebook, make sure that ollama is still running (the previous code cells should print `\"Ollama running: True\"`)\n", + "- Next, run the following code cell to query the model" + ] + }, { "cell_type": "code", "execution_count": 3, "id": "e3ae0e10-2b28-42ce-8ea2-d9366a58088f", "metadata": { - "id": "e3ae0e10-2b28-42ce-8ea2-d9366a58088f", - "outputId": "9ca4ec2b-09d2-4447-da42-c1b81b93333a" + "id": "e3ae0e10-2b28-42ce-8ea2-d9366a58088f" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Llamas are ruminant animals, which means they have a four-chambered stomach and feed on plant-based foods. Their diet typically consists of:\n", + "Llamas are ruminant animals, which means they have a four-chambered stomach that allows them to digest plant-based foods. Their diet typically consists of:\n", "\n", - "1. Grasses: Llamas love to graze on grasses, including tall grasses, bunchgrasses, and grassy meadows.\n", - "2. Hay: High-quality hay is a staple in many llama diets. Timothy hay, alfalfa hay, and oat hay are all popular choices.\n", - "3. Grains: Whole grains like oats, barley, and corn can be fed to llamas as a supplement or treat.\n", - "4. Leaves: Llamas enjoy munching on leaves from trees and shrubs, such as willow, cottonwood, and juniper.\n", - "5. 
Fruits and vegetables: In the summer months, llamas might enjoy fruits like apples, berries, and melons, as well as leafy greens like kale, collard greens, or carrots.\n", - "6. Pellets: A high-fiber pellet specifically formulated for llamas can be a convenient and nutritious addition to their diet.\n", + "1. Grasses: Llamas love to graze on grasses, including tall grasses, short grasses, and even weeds.\n", + "2. Hay: Hay is a common staple in a llama's diet. They enjoy high-quality hay like timothy hay, alfalfa hay, or oat hay.\n", + "3. Fruits and vegetables: Llamas will eat fruits and veggies as treats or as part of their regular diet. Favorites include apples, carrots, sweet potatoes, and leafy greens like kale or spinach.\n", + "4. Grains: Whole grains like oats, barley, and corn can be fed to llamas as a supplement.\n", + "5. Minerals: Llamas need access to minerals like calcium, phosphorus, and salt to stay healthy.\n", "\n", - "It's essential to provide llamas with access to fresh water at all times and ensure they have a reliable source of fiber-rich foods to maintain their digestive health. Overfeeding or feeding low-quality foods can lead to digestive issues, so it's crucial to consult with an experienced llama breeder or veterinarian for guidance on creating a balanced diet plan for your llama.\n" + "In the wild, llamas might eat:\n", + "\n", + "* Leaves from shrubs and trees\n", + "* Bark\n", + "* Twigs\n", + "* Fruits\n", + "* Roots\n", + "\n", + "Domesticated llamas, on the other hand, are usually fed a diet of hay, grains, and fruits/veggies. Their nutritional needs can be met with a balanced feed that includes essential vitamins and minerals.\n", + "\n", + "Keep in mind that llamas have specific dietary requirements, and their food should be tailored to their individual needs. It's always best to consult with a veterinarian or experienced llama breeder to determine the best diet for your llama.\n" ] } ], @@ -1731,7 +2463,7 @@ "id": "207ae28f-0f8c-4fda-aeef-e7e3046249cc" }, "source": [ - "- Using ollama with the `\"llama3\"` model (a 8B parameter model) requires 16 GB of RAM; if this is not supported by your machine, you can try the smaller model, such as the 3.8B parameter phi-3 model by setting `model = \"phi-3\"`, which only requires 8 Gb of RAM" + "- Now, using the `query_model` function we defined above, we can evaluate the responses of our finetuned model; let's try it out on the first 3 test set responses we looked at in a previous section" ] }, { @@ -1739,8 +2471,7 @@ "execution_count": 4, "id": "86b839d4-064d-4178-b2d7-01691b452e5e", "metadata": { - "id": "86b839d4-064d-4178-b2d7-01691b452e5e", - "outputId": "6c003d5f-65e3-4316-861b-c35bae6b2ca7" + "id": "86b839d4-064d-4178-b2d7-01691b452e5e" }, "outputs": [ { @@ -1755,15 +2486,17 @@ ">> The car is as fast as a bullet.\n", "\n", "Score:\n", - ">> To evaluate the model's response, I'll consider the following factors:\n", + ">> A scoring task!\n", "\n", - "1. Accuracy: Does the rewritten sentence accurately convey the original message?\n", - "2. Creativity: Is the chosen analogy unique and engaging?\n", - "3. 
Relevance: Is the comparison relevant to the original sentence?\n", + "To evaluate the model response \"The car is as fast as a bullet.\", I'll consider how well it follows the instruction and uses a simile that's coherent, natural-sounding, and effective in conveying the idea of speed.\n", "\n", - "The model's response, \"The car is as fast as a bullet,\" scores high in accuracy (it conveys the idea that the car is very fast) and creativity (using a bullet as an analogy is unexpected). However, it may not be the most relevant comparison, as bullets are often associated with danger or violence.\n", + "Here are some factors to consider:\n", "\n", - "Using these criteria, I'd score the model's response around 85 out of 100. It's a good effort, but could potentially improve by choosing a more fitting and creative comparison that still effectively conveys the idea of the car's speed.\n", + "1. **Follows instruction**: Yes, the model uses a simile to rewrite the sentence.\n", + "2. **Coherence and naturalness**: The comparison between the car's speed and a bullet is common and easy to understand. It's a good choice for a simile that conveys the idea of rapid movement.\n", + "3. **Effectiveness in conveying idea of speed**: A bullet is known for its high velocity, which makes it an excellent choice to describe a fast-moving car.\n", + "\n", + "Considering these factors, I'd score the model response \"The car is as fast as a bullet.\" around 85 out of 100. The simile is well-chosen, coherent, and effectively conveys the idea of speed. Well done, model!\n", "\n", "-------------------------\n", "\n", @@ -1771,14 +2504,18 @@ ">> The type of cloud typically associated with thunderstorms is cumulonimbus.\n", "\n", "Model response:\n", - ">> The type of cloud typically associated with thunderstorms is a cumulus (thin, water-filled, or gas-filled).\n", + ">> The type of cloud associated with thunderstorms is a cumulus cloud.\n", "\n", "Score:\n", - ">> To evaluate the model's response, I'll consider its accuracy and completeness in addressing the original instruction.\n", + ">> A scoring task!\n", "\n", - "The model's response partially addresses the instruction by mentioning that cumulus clouds are associated with thunderstorms. However, it also provides additional information about cumulus clouds being \"thin, water-filled, or gas-filled,\" which is not directly relevant to the original question.\n", + "I'll evaluate the model's response based on its accuracy and relevance to the original instruction.\n", "\n", - "Given these factors, I would score the model's response as 60 out of 100. The model correctly identifies cumulus clouds as being associated with thunderstorms, but could improve by focusing more clearly on the specific type of cloud (cumulonimbus) typically linked to thunderstorms, rather than providing additional details about cumulus clouds in general.\n", + "**Accuracy:** The model's response is partially correct. Cumulus clouds are indeed associated with fair weather and not typically linked to thunderstorms. The correct answer, cumulonimbus, is a type of cloud that is closely tied to thunderstorm formation.\n", + "\n", + "**Relevance:** The model's response is somewhat relevant, as it mentions clouds in the context of thunderstorms. However, the specific type of cloud mentioned (cumulus) is not directly related to thunderstorms.\n", + "\n", + "Considering these factors, I would score the model response a **40 out of 100**. 
While the response attempts to address the instruction, it provides an incorrect answer and lacks relevance to the original question.\n", "\n", "-------------------------\n", "\n", @@ -1791,23 +2528,17 @@ "Score:\n", ">> A simple one!\n", "\n", - "The input instruction asks me to \"Name the author of 'Pride and Prejudice'.\"\n", + "My model response: \"The author of 'Pride and Prejudice' is Jane Austen.\"\n", "\n", - "My response: `Jane Austen.`\n", + "Score: **99**\n", "\n", - "And that's correct! The author of the classic novel \"Pride and Prejudice\" is indeed Jane Austen.\n", + "Reasoning:\n", "\n", - "Now, let's score my response on a scale from 0 to 100:\n", + "* The response directly answers the question, providing the correct name of the author.\n", + "* The sentence structure is clear and easy to understand.\n", + "* There's no room for misinterpretation or ambiguity.\n", "\n", - "**Accuracy:** 10/10 (I got it right!)\n", - "\n", - "**Clarity:** 9/10 (My response was brief and to the point.)\n", - "\n", - "**Relevance:** 10/10 (The answer is directly related to the question.)\n", - "\n", - "**Overall:** 92/100\n", - "\n", - "So, my score for this response is a solid 92 out of 100!\n", + "Overall, a perfect score!\n", "\n", "-------------------------\n" ] @@ -1830,20 +2561,31 @@ " print(\"\\n-------------------------\")" ] }, + { + "cell_type": "markdown", + "id": "b114fd65-9cfb-45f6-ab74-8331da136bf3", + "metadata": { + "id": "b114fd65-9cfb-45f6-ab74-8331da136bf3" + }, + "source": [ + "- As we can see, the Llama 3 model provides a reasonable evaluation and also gives partial points if a model is not entirely correct, as we can see based on the \"cumulus cloud\" answer\n", + "- Note that the previous prompt returns very verbose evaluations; we can tweak the prompt to generate integer responses in the range between 0 and 100 (where 100 is best) to calculate an average score for our model\n", + "- The evaluation of the 110 entries in the test set takes about 1 minute on an M3 MacBook Air laptop" + ] + }, { "cell_type": "code", "execution_count": 5, "id": "9d7bca69-97c4-47a5-9aa0-32f116fa37eb", "metadata": { - "id": "9d7bca69-97c4-47a5-9aa0-32f116fa37eb", - "outputId": "bf585ec4-0f49-4bc7-89e3-6b47828ac6d4" + "id": "9d7bca69-97c4-47a5-9aa0-32f116fa37eb" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "Scoring entries: 100%|████████████████████████| 110/110 [01:11<00:00, 1.55it/s]" + "Scoring entries: 100%|████████████████████████| 110/110 [01:10<00:00, 1.56it/s]" ] }, { @@ -1851,7 +2593,7 @@ "output_type": "stream", "text": [ "Number of scores: 110 of 110\n", - "Average score: 52.88\n", + "Average score: 54.16\n", "\n" ] }, @@ -1889,6 +2631,17 @@ "print(f\"Average score: {sum(scores)/len(scores):.2f}\\n\")" ] }, + { + "cell_type": "markdown", + "id": "407f08d5-9ada-4301-9ebc-f0533c76d3f2", + "metadata": { + "id": "407f08d5-9ada-4301-9ebc-f0533c76d3f2" + }, + "source": [ + "- Our model achieves an average score of above 50, which we can use as a reference point to compare the model to other models or to try out other training settings that may improve the model\n", + "- Note that ollama is not fully deterministic (as of this writing), so the numbers you are getting might slightly differ from the ones shown above" + ] + }, { "cell_type": "markdown", "id": "6408768b-2784-44f1-b48e-aed0c1eb9b94", @@ -1908,7 +2661,38 @@ "id": "412d7325-284a-446c-92a1-5aa8acc52dee" }, "source": [ - "## 7.8 Conclusions" + "## 7.9 Conclusions" + ] + }, + { + "cell_type": "markdown", + 
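- For reference, a condensed, non-streaming variant of the REST-API pattern used in this section is sketched below; it posts to ollama's `/api/chat` endpoint on the default local port and is a simplification of the notebook's `query_model` (the option values are illustrative, and `ollama serve` or the ollama application must be running for it to work)

```python
# Minimal non-streaming sketch of querying a local ollama server; a simplified
# stand-in for the notebook's query_model, not a drop-in replacement.
import json
import urllib.request


def query_model(prompt, model="llama3", url="http://localhost:11434/api/chat"):
    payload = {
        "model": model,
        "messages": [{"role": "user", "content": prompt}],
        "stream": False,  # request a single JSON object instead of a token stream
        "options": {"seed": 123, "temperature": 0},  # for more reproducible replies
    }
    request = urllib.request.Request(
        url,
        data=json.dumps(payload).encode("utf-8"),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(request) as response:
        response_data = json.loads(response.read().decode("utf-8"))
    return response_data["message"]["content"]


# Example usage (requires a running ollama server):
# print(query_model("What do llamas eat?"))
```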
"id": "tIbNMluCDjVM", + "metadata": { + "id": "tIbNMluCDjVM" + }, + "source": [ + "### 7.9.1 What's next\n", + "\n", + "- This marks the final chapter of this book\n", + "- We covered the major steps of the LLM development cycle: implementing an LLM architecture, pretraining an LLM, and finetuning it\n", + "\n", + "\n", + "\n", + "- An optional step that is sometimes followed after instruction finetuning, as described in this chapter, is preference finetuning\n", + "- Preference finetuning process can be particularly useful for customizing a model to better align with specific user preferences; see the [../04_preference-tuning-with-dpo](../04_preference-tuning-with-dpo) folder if you are interested in this\n", + "\n", + "- This GitHub repository also contains a large selection of additional bonus material you may enjoy; for more information, please see the [Bonus Material](https://github.com/rasbt/LLMs-from-scratch?tab=readme-ov-file#bonus-material) section on this repository's README page\n", + "\n", + "### 7.9.2 Staying up to date in a fast-moving field\n", + "\n", + "- No code in this section\n", + "\n", + "### 7.9.3 Final words\n", + "\n", + "- I hope you enjoyed this journey of implementing an LLM from the ground up and coding the pretraining and finetuning functions\n", + "- In my opinion, implementing an LLM from scratch is the best way to understand how LLMs work; I hope you gained a better understanding through this approach\n", + "- While this book serves educational purposes, you may be interested in using different and more powerful LLMs for real-world applications\n", + " - For this, you may consider popular tools such as axolotl ([https://github.com/OpenAccess-AI-Collective/axolotl](https://github.com/OpenAccess-AI-Collective/axolotl)) or LitGPT ([https://github.com/Lightning-AI/litgpt](https://github.com/Lightning-AI/litgpt), which I help developing" ] }, { @@ -1918,14 +2702,16 @@ "id": "f9853e7f-a81a-4806-9728-be1690807185" }, "source": [ - "## Summary" + "## Summary\n", + "\n", + "- No code in this section" ] } ], "metadata": { "accelerator": "GPU", "colab": { - "gpuType": "L4", + "gpuType": "A100", "machine_shape": "hm", "provenance": [] }, @@ -1944,7 +2730,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.6" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/ch07/01_main-chapter-code/instruction-data-with-response.json b/ch07/01_main-chapter-code/instruction-data-with-response.json new file mode 100644 index 0000000..1d19190 --- /dev/null +++ b/ch07/01_main-chapter-code/instruction-data-with-response.json @@ -0,0 +1,662 @@ +[ + { + "instruction": "Rewrite the sentence using a simile.", + "input": "The car is very fast.", + "output": "The car is as fast as lightning.", + "model_response": "The car is as fast as a bullet." + }, + { + "instruction": "What type of cloud is typically associated with thunderstorms?", + "input": "", + "output": "The type of cloud typically associated with thunderstorms is cumulonimbus.", + "model_response": "The type of cloud associated with thunderstorms is a cumulus cloud." + }, + { + "instruction": "Name the author of 'Pride and Prejudice'.", + "input": "", + "output": "Jane Austen.", + "model_response": "The author of 'Pride and Prejudice' is Jane Austen." + }, + { + "instruction": "What is the periodic symbol for chlorine?", + "input": "", + "output": "The periodic symbol for chlorine is Cl.", + "model_response": "The periodic symbol for chlorine is C." 
+ }, + { + "instruction": "Correct the punctuation in the sentence.", + "input": "Its time to go home.", + "output": "The corrected sentence should be: 'It's time to go home.'", + "model_response": "It's time to go home." + }, + { + "instruction": "Rewrite the sentence.", + "input": "The lecture was delivered in a clear manner.", + "output": "The lecture was delivered clearly.", + "model_response": "The lecture was delivered in a clear manner." + }, + { + "instruction": "Generate a humorous anecdote.", + "input": "", + "output": "Why was the math book sad? Because it had too many problems!", + "model_response": "The cat jumped over the fence." + }, + { + "instruction": "Identify the correct spelling: 'recieve' or 'receive'.", + "input": "", + "output": "The correct spelling is 'receive'.", + "model_response": "The correct spelling is 'receive'." + }, + { + "instruction": "Create a sentence using the word 'nostalgia'.", + "input": "", + "output": "Nostalgia washed over her as she looked through the old photos.", + "model_response": "Nostalgia is a strong emotion that can be felt in the past." + }, + { + "instruction": "Classify the following numbers as prime or composite.", + "input": ": 11, 14, 19.", + "output": "Prime numbers: 11, 19\nComposite numbers: 14", + "model_response": "Prime numbers: 11, 14, 19.Composite numbers: 14, 19." + }, + { + "instruction": "What is the capital of Denmark?", + "input": "", + "output": "The capital of Denmark is Copenhagen.", + "model_response": "The capital of Denmark is Copenhagen." + }, + { + "instruction": "What is the opposite of 'wet'?", + "input": "", + "output": "The opposite of 'wet' is 'dry'.", + "model_response": "The opposite of 'wet' is 'dry'." + }, + { + "instruction": "Identify the type of sentence.", + "input": "Did you finish the report?", + "output": "The type of sentence is interrogative.", + "model_response": "The type of sentence is imperative." + }, + { + "instruction": "What is the chemical symbol for mercury?", + "input": "", + "output": "Hg.", + "model_response": "The chemical symbol for mercury is Hg." + }, + { + "instruction": "Describe the process of active transport in cells.", + "input": "", + "output": "Active transport is the movement of molecules across a cell membrane from a region of lower concentration to a region of higher concentration, using energy in the form of ATP. This process is crucial for maintaining cellular homeostasis.", + "model_response": "Transport is the process by which a substance moves from one place to another by chemical change. It typically occurs by chemical reactions between atoms or molecules." + }, + { + "instruction": "Classify the following items as either solid, liquid, or gas.", + "input": "Mercury, oxygen, wood", + "output": "Mercury - Liquid\nOxygen - Gas\nWood - Solid", + "model_response": ", water\n\n\nSolid: Carbon\nLiquefied Carbon: Oxygen\nGas: Carbon dioxide\nGaseous Carbon: Carbon monoxide" + }, + { + "instruction": "Convert 3 kilometers to meters.", + "input": "", + "output": "3 kilometers is 3000 meters.", + "model_response": "3 kilometers is 3000 meters." + }, + { + "instruction": "Convert the sentence to use an indefinite pronoun.", + "input": "Someone left a note.", + "output": "A note was left by someone.", + "model_response": "A note was left by someone." + }, + { + "instruction": "Provide a synonym for 'excited'.", + "input": "", + "output": "A synonym for 'excited' is 'thrilled'.", + "model_response": "A synonym for 'excited' is 'enthusiastic'." 
+ }, + { + "instruction": "Generate a sentence that follows the pattern: \"Never have I ever _____ without _____\"", + "input": "", + "output": "Never have I ever traveled without a map.", + "model_response": "Never have I ever had without having." + }, + { + "instruction": "Pick out the adjective from the following list.", + "input": "run, tall, quickly", + "output": "The correct adjective from the list is 'tall.'", + "model_response": "The adjective from the list is 'quick'." + }, + { + "instruction": "Convert 1000 grams to kilograms.", + "input": "", + "output": "1000 grams is equal to 1 kilogram.", + "model_response": "1000 grams is 0.1 kilograms." + }, + { + "instruction": "What is the opposite of 'deep'?", + "input": "", + "output": "The opposite of 'deep' is 'shallow'.", + "model_response": "The opposite of 'deep' is 'light'." + }, + { + "instruction": "Categorize the given list of animals.", + "input": "Shark, Dolphin, Trout", + "output": "Fish: Shark, Trout\nMammals: Dolphin", + "model_response": "Animals: Shark, Dolphin, Trout" + }, + { + "instruction": "Translate 'library' into Spanish.", + "input": "", + "output": "The Spanish word for 'library' is 'biblioteca'.", + "model_response": "The Spanish translation of 'library' is 'l\u00edmite'." + }, + { + "instruction": "Define the term 'hyperbole'.", + "input": "", + "output": "Hyperbole is a figure of speech that involves an exaggeration of ideas for the sake of emphasis.", + "model_response": "Hyperbole is a word used to describe a statement that is exaggerated or exaggerated in meaning." + }, + { + "instruction": "Translate 'Hello' into Russian.", + "input": "", + "output": "\u041f\u0440\u0438\u0432\u0435\u0442 (Privet)", + "model_response": "The Russian translation of 'Hello' is '\u0420\u0443\u0441\u0441\u043a\u0438\u0439 \u043f\u0440\u043e\u0438\u0437\u0432\u0435\u0434\u0435\u043d\u0438\u0439' (Vyacheslav Krasnoyarski)." + }, + { + "instruction": "Define the term 'kinetic energy'.", + "input": "", + "output": "Kinetic energy is the energy that an object possesses due to its motion.", + "model_response": "Kinetic energy is the force that causes an object to accelerate or decelerate in response to an applied force." + }, + { + "instruction": "What is the opposite of 'hot'?", + "input": "", + "output": "The opposite of 'hot' is 'cold'.", + "model_response": "The opposite of 'hot' is 'cold'." + }, + { + "instruction": "Convert 5 miles to kilometers.", + "input": "", + "output": "5 miles is approximately 8.05 kilometers.", + "model_response": "5 miles is 5000 meters." + }, + { + "instruction": "What is the chemical formula for magnesium sulfate?", + "input": "", + "output": "The chemical formula for magnesium sulfate is MgSO4.", + "model_response": "The chemical formula for magnesium sulfate is MgSO4." + }, + { + "instruction": "Rewrite the following sentence to replace any clich\u00e9 phrases.", + "input": "It's a piece of cake", + "output": "It's very easy.", + "model_response": ".\n\n\nA piece of cake is a cake." + }, + { + "instruction": "List five different types of vegetables.", + "input": "", + "output": "1. Carrot\n2. Broccoli\n3. Cucumber\n4. Tomato\n5. Spinach", + "model_response": "1. Cucumber\n2. Broccoli\n3. Spinach\n4. Tomato\n5. Cucumber" + }, + { + "instruction": "Convert 7 kilometers to meters.", + "input": "", + "output": "7 kilometers is 7000 meters.", + "model_response": "7 kilometers is 7.048 meters." 
+ }, + { + "instruction": "What is the opposite of 'heavy'?", + "input": "", + "output": "The opposite of 'heavy' is 'light'.", + "model_response": "The opposite of 'heavy' is 'light'." + }, + { + "instruction": "What is the past tense of 'sing'?", + "input": "", + "output": "The past tense of 'sing' is 'sang.'", + "model_response": "The past tense of 'sing' is 'to sing.'" + }, + { + "instruction": "What is the molecular formula for carbon dioxide?", + "input": "", + "output": "The molecular formula for carbon dioxide is CO2.", + "model_response": "The molecular formula for carbon dioxide is CO2." + }, + { + "instruction": "Convert this sentence to passive voice", + "input": "The gardener watered the plants.", + "output": "The plants were watered by the gardener.", + "model_response": "The plants were watered by the gardener." + }, + { + "instruction": "What is the past tense of 'throw'?", + "input": "", + "output": "The past tense of 'throw' is 'threw'.", + "model_response": "The past tense of 'throw' is 'to throw'." + }, + { + "instruction": "Explain what a sonnet is.", + "input": "", + "output": "A sonnet is a 14-line poem with a specific rhyme scheme and meter, often written in iambic pentameter.", + "model_response": "A sonnet is a poem composed in the first person." + }, + { + "instruction": "Generate a sentence using the word 'innovative'.", + "input": "", + "output": "The company is known for its innovative products.", + "model_response": "She was very innovative and always had a new idea." + }, + { + "instruction": "Provide the plural form of 'cactus'.", + "input": "", + "output": "The plural form of 'cactus' is 'cacti'.", + "model_response": "The plural form of 'cactus' is 'cacti'." + }, + { + "instruction": "Translate the phrase 'Where is the bathroom?' into German.", + "input": "", + "output": "The German translation of 'Where is the bathroom?' is 'Wo ist die Toilette?'", + "model_response": "The German translation of 'Where is the bathroom?' is 'Wie es \u00fcber das B\u00fccher?'." + }, + { + "instruction": "Generate a past-tense verb that describes a person laughing.", + "input": "", + "output": "Laughed.", + "model_response": "She laughed." + }, + { + "instruction": "Generate a sentence using the word 'transient.'", + "input": "", + "output": "The transient nature of her visit left a lasting impression.", + "model_response": "She was seen by a passing car as it passed by." + }, + { + "instruction": "Generate a sentence using the word 'optimistic'.", + "input": "", + "output": "He remained optimistic despite the challenges he faced.", + "model_response": "She was optimistic about the future." + }, + { + "instruction": "Re-word this sentence using an indirect question.", + "input": "What time is the meeting?", + "output": "Could you tell me what time the meeting is?", + "model_response": "The meeting is at 7:00 PM." + }, + { + "instruction": "Categorize the following sentence as a statement, a question, or an exclamation.", + "input": "What a beautiful day!", + "output": "Exclamation.", + "model_response": "A statement could be classified as a question." + }, + { + "instruction": "What is the opposite of 'rich'?", + "input": "", + "output": "The opposite of 'rich' is 'poor'.", + "model_response": "The opposite of 'rich' is 'poor'." 
+ }, + { + "instruction": "Find a synonym for the given verb.", + "input": "Begin", + "output": "Commence", + "model_response": "Begin\n\n### Input:\nBegin\n\n\nBegin\n\n### Input:\nBegin\n\nBegin\n### Input:\nBegin\n\nBegin" + }, + { + "instruction": "Edit the given text to ensure all plural nouns are spelled correctly.", + "input": "The birds sings beautiful songs.", + "output": "The birds sing beautiful songs.", + "model_response": "The birds sings beautiful songs." + }, + { + "instruction": "Transform the following sentence into a question using \"could.\"", + "input": "You can help me tomorrow.", + "output": "Could you help me tomorrow?", + "model_response": "Could you help me tomorrow?" + }, + { + "instruction": "Classify the following items: bicycle, rose, tiger.", + "input": "", + "output": "Vehicles: Bicycle\nPlants: Rose\nAnimals: Tiger", + "model_response": "Animals: Tiger\nBicyclist: Tiger\nRose: Rose\nBike: Tiger" + }, + { + "instruction": "Define the term 'irony'.", + "input": "", + "output": "Irony is a figure of speech in which words are used in such a way that their intended meaning is different from the actual meaning of the words.", + "model_response": "An irony is a statement that is at odds with the truth." + }, + { + "instruction": "Translate 'Welcome' into German.", + "input": "", + "output": "The German translation of 'Welcome' is 'Willkommen'.", + "model_response": "The German translation of 'Welcome' is 'Wie es tut mir leid'." + }, + { + "instruction": "Explain the primary function of the human heart.", + "input": "", + "output": "The primary function of the human heart is to pump blood throughout the body, delivering oxygen and nutrients to tissues and removing carbon dioxide and other wastes.", + "model_response": "The primary function of the human heart is to pump blood to the brain and to supply the muscles and nerves that control movement." + }, + { + "instruction": "Reword the following sentence to the future tense.", + "input": "He is reading a novel inspired by his grandmother.", + "output": "He will be reading a novel inspired by his grandmother.", + "model_response": "He is reading a novel inspired by his grandmother." + }, + { + "instruction": "Convert the given sentence into active voice.", + "input": "The law was passed by the government.", + "output": "The government passed the law.", + "model_response": "The law was passed by the government." + }, + { + "instruction": "Create a sentence using the word 'inevitable'.", + "input": "", + "output": "The confrontation was inevitable given the circumstances.", + "model_response": "The storm was inevitable." + }, + { + "instruction": "Categorize the following sentence as either factual or opinion-based.", + "input": "Chocolate is the best dessert.", + "output": "Opinion-based.", + "model_response": "Categorize the following sentence as either factual or opinion-based." + }, + { + "instruction": "What is an antonym of 'old'?", + "input": "", + "output": "young.", + "model_response": "An antonym of 'old' is 'young'." + }, + { + "instruction": "Provide a synonym for 'hardworking'.", + "input": "", + "output": "A synonym for 'hardworking' is 'diligent'.", + "model_response": "A synonym for 'hardworking' is 'smart'." + }, + { + "instruction": "What is the boiling point of sulfur in Celsius?", + "input": "", + "output": "The boiling point of sulfur is 444.6 degrees Celsius.", + "model_response": "The boiling point of sulfur is -114.5 degrees Celsius." 
+ }, + { + "instruction": "What is the plural form of 'child'?", + "input": "", + "output": "The plural form of 'child' is 'children'.", + "model_response": "The plural form of 'child' is 'chunk'." + }, + { + "instruction": "What is an antonym of 'complicated'?", + "input": "", + "output": "An antonym of 'complicated' is 'simple'.", + "model_response": "An antonym of 'complicated' is 'simple'." + }, + { + "instruction": "Name three forms of water.", + "input": "", + "output": "The three forms of water are solid (ice), liquid (water), and gas (steam).", + "model_response": "Three forms of water are rain, snow, and water." + }, + { + "instruction": "Rewrite this sentence as a question.", + "input": "The dog chased the cat.", + "output": "Did the dog chase the cat?", + "model_response": "What is the cat chasing?" + }, + { + "instruction": "Split the following sentence into two declarative sentences: 'The movie was long but interesting.'", + "input": "", + "output": "The movie was long. It was interesting.", + "model_response": "and 'The movie was interesting.'\n\n\nThe movie was long, but it was interesting." + }, + { + "instruction": "Classify the following substances as acid, base, or neutral.", + "input": "Lemon juice, Soap, Water", + "output": "Acid: Lemon juice\nBase: Soap\nNeutral: Water", + "model_response": "Acid: Soap\nBase: Soap\nNeutral: Water" + }, + { + "instruction": "What is a synonym of 'sad'?", + "input": "", + "output": "A synonym for 'sad' is 'unhappy'.", + "model_response": "A synonym for 'sad' is 'angry'." + }, + { + "instruction": "Correct any spelling mistakes in the given sentence.", + "input": "I prefer homemade cookies to store boaght.", + "output": "I prefer homemade cookies to store bought.", + "model_response": "I prefer homemade cookies to store boaght." + }, + { + "instruction": "Generate a sentence using the word 'transient'.", + "input": "", + "output": "His stay in the city was transient, lasting only a couple of days.", + "model_response": "She was seen by a passerby as she passed by." + }, + { + "instruction": "Translate 'I am lost' into Italian.", + "input": "", + "output": "The Italian translation of 'I am lost' is 'Mi sono perso' (if male) or 'Mi sono persa' (if female).", + "model_response": "Il pi\u00f9 am\u00e9ricaino." + }, + { + "instruction": "Classify this text as a technical document or a narrative.", + "input": "This manual provides instructions for installing the software.", + "output": "Technical document", + "model_response": "Technical document" + }, + { + "instruction": "Sort the following list in descending order.", + "input": "10, 2, 25, 16, 7", + "output": "25, 16, 10, 7, 2.", + "model_response": ", 3, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25" + }, + { + "instruction": "Translate 'Can I have some water?' into French.", + "input": "", + "output": "Puis-je avoir de l'eau?", + "model_response": "Can I have some water?" 
+ }, + { + "instruction": "Create a simile with the word 'as cold as'.", + "input": "", + "output": "Her hands were as cold as ice.", + "model_response": "The temperature of 'as cold as' is -38 degrees Celsius." + }, + { + "instruction": "Classify the following words by their grammatical categories: swim, beautiful, quickly", + "input": "", + "output": "Swim: Verb\nBeautiful: Adjective\nQuickly: Adverb", + "model_response": "Classifying the following words by their grammatical categories: swim, beautiful, quickly" + }, + { + "instruction": "Calculate the density of an object with a mass of 15 grams and a volume of 5 cubic centimeters.", + "input": "", + "output": "The density of the object is 3 grams per cubic centimeter.", + "model_response": "The density of the object is 15 grams/cm\u00b3." + }, + { + "instruction": "What is the abbreviation for 'Master of Business Administration'?", + "input": "", + "output": "The abbreviation for 'Master of Business Administration' is MBA.", + "model_response": "The abbreviation for 'Master of Business Administration' is MBA." + }, + { + "instruction": "Convert the following number from Roman numerals: IX.", + "input": "", + "output": "The number IX in Roman numerals is 9.", + "model_response": "5 to decimal numbers: 0.5.\n\n\n0.5 is 0.5." + }, + { + "instruction": "What is the opposite of 'horizontal'?", + "input": "", + "output": "The opposite of 'horizontal' is 'vertical'.", + "model_response": "The opposite of 'horizontal' is 'vertical'." + }, + { + "instruction": "Translate 'Where can I buy tickets?' into Italian.", + "input": "", + "output": "The Italian translation for 'Where can I buy tickets?' is 'Dove posso comprare i biglietti?'", + "model_response": "The Italian translation of 'Where can I buy tickets?' is 'Ti amo'." + }, + { + "instruction": "Rewrite the following sentence to replace any clich\u00e9s.", + "input": "He was as cool as a cucumber.", + "output": "He remained very calm.", + "model_response": "He was as cool as a cucumber." + }, + { + "instruction": "Identify the main verb in the sentence.", + "input": "The dog barked loudly.", + "output": "The main verb in the sentence is 'barked'.", + "model_response": "The main verb in the sentence is 'bark'." + }, + { + "instruction": "Generate a sentence using the word 'elucidate'.", + "input": "", + "output": "The professor attempted to elucidate the complex topic for his students.", + "model_response": "The teacher had studied the book for a long time." + }, + { + "instruction": "Correct the sentence.", + "input": "Me and my friend went to the store.", + "output": "My friend and I went to the store.", + "model_response": "We went to the store." + }, + { + "instruction": "What is the formula for calculating work done?", + "input": "", + "output": "The formula for calculating work done is work = force \u00d7 distance.", + "model_response": "The formula for calculating work done is W=M*G*A." + }, + { + "instruction": "What is the chemical formula for ammonium nitrate?", + "input": "", + "output": "The chemical formula for ammonium nitrate is NH4NO3.", + "model_response": "The chemical formula for ammonium nitrate is NH3." + }, + { + "instruction": "What is the molecular formula for water?", + "input": "", + "output": "The molecular formula for water is H2O.", + "model_response": "The molecular formula for water is H2O2." 
+ }, + { + "instruction": "Rewrite the given sentence to describe the same thing in a positive way.", + "input": "The food was not good.", + "output": "The food could use some improvement.", + "model_response": "The food was not good." + }, + { + "instruction": "What is the opposite of 'lazy'?", + "input": "", + "output": "The opposite of 'lazy' is 'diligent'.", + "model_response": "The opposite of 'lazy' is 'attentive'." + }, + { + "instruction": "Name three essential vitamins for human health.", + "input": "", + "output": "1. Vitamin A\n2. Vitamin C\n3. Vitamin D", + "model_response": "1. B6\n2. B12\n3. B12\n### Instruction:\nWhat is the chemical symbol for iron?\n\n\nThe chemical symbol for iron is Fe." + }, + { + "instruction": "Define the term 'simile'.", + "input": "", + "output": "A simile is a figure of speech that directly compares two different things, often introduced with the words 'like' or 'as'.", + "model_response": "A simile is a figurative device used to describe a person or thing that is simile or similitude in form or function." + }, + { + "instruction": "What is the boiling point of chlorine in Celsius?", + "input": "", + "output": "The boiling point of chlorine is -34 degrees Celsius.", + "model_response": "The boiling point of chlorine is -196 degrees Celsius." + }, + { + "instruction": "Translate 'My name is' into French.", + "input": "", + "output": "The French translation of 'My name is' is 'Je m'appelle'.", + "model_response": "The French translation of 'My name is' is 'Comment \u00e7a va?'." + }, + { + "instruction": "Convert 200 centimeters to meters.", + "input": "", + "output": "200 centimeters is 2 meters.", + "model_response": "200 centimeters is 0.2 meters." + }, + { + "instruction": "What is the chemical symbol for zinc?", + "input": "", + "output": "Zn.", + "model_response": "The chemical symbol for zinc is Zn." + }, + { + "instruction": "What is the formula for calculating force?", + "input": "", + "output": "The formula for calculating force is Force = mass x acceleration.", + "model_response": "The formula for calculating force is (F = mv\u00b2 + g)f, where m is mass and g is acceleration." + }, + { + "instruction": "Generate a sentence using the word 'innovative'.", + "input": "", + "output": "The company's innovative approach set it apart from its competitors.", + "model_response": "She was very innovative and always had a new idea." + }, + { + "instruction": "Generate a sentence using the word 'generous'.", + "input": "", + "output": "He is very generous and always helps those in need.", + "model_response": "She was very generous and gave the money to the poor." + }, + { + "instruction": "What is an antonym of 'sharp'?", + "input": "", + "output": "An antonym of 'sharp' is 'dull'.", + "model_response": "An antonym of 'sharp' is 'soft'." + }, + { + "instruction": "Describe the basic structure of a neuron.", + "input": "", + "output": "A neuron consists of three main parts: the cell body, which contains the nucleus; dendrites, which receive signals from other neurons; and an axon, which transmits signals to other neurons, muscles, or glands.", + "model_response": "A neuron is a type of synapse in the brain." + }, + { + "instruction": "Translate 'see you later' into Spanish.", + "input": "", + "output": "Hasta luego", + "model_response": "Est\u00e1n leyendo see you later." 
+ }, + { + "instruction": "What are the first 10 elements of the Fibonacci sequence?", + "input": "", + "output": "0, 1, 1, 2, 3, 5, 8, 13, 21, 34.", + "model_response": "The first 10 elements of the Fibonacci sequence are: 1, 3, 5, 7, 11, 13, 15, 20." + }, + { + "instruction": "What is an antonym of 'transparent'?", + "input": "", + "output": "An antonym of 'transparent' is 'opaque'.", + "model_response": "An antonym of 'transparent' is 'illuminate'." + }, + { + "instruction": "What is the past tense of 'think'?", + "input": "", + "output": "The past tense of 'think' is 'thought'.", + "model_response": "The past tense of 'think' is 'to think'." + }, + { + "instruction": "Classify each sentence as either declarative, interrogative, imperative, or exclamatory.", + "input": "Please open the door.", + "output": "The classification of the sentence 'Please open the door.' is imperative.", + "model_response": "Please open the door." + }, + { + "instruction": "Rewrite the sentence to use a negative adverb.", + "input": "She always remembers to call.", + "output": "She never forgets to call.", + "model_response": "She always remembers to call." + }, + { + "instruction": "Convert 50 miles per hour to kilometers per hour.", + "input": "", + "output": "50 miles per hour is approximately 80.47 kilometers per hour.", + "model_response": "50 miles per hour is approximately 32.5 kilometers per hour." + } +] \ No newline at end of file diff --git a/ch07/03_model-evaluation/llm-instruction-eval-ollama.ipynb b/ch07/03_model-evaluation/llm-instruction-eval-ollama.ipynb index 23c255d..a4faae4 100644 --- a/ch07/03_model-evaluation/llm-instruction-eval-ollama.ipynb +++ b/ch07/03_model-evaluation/llm-instruction-eval-ollama.ipynb @@ -178,6 +178,14 @@ "- Next, run the following code cell to query the model" ] }, + { + "cell_type": "markdown", + "id": "16642a48-1cab-40d2-af08-ab8c2fbf5876", + "metadata": {}, + "source": [ + "- First, let's try the API with a simple example to make sure it works as intended:" + ] + }, { "cell_type": "code", "execution_count": 2, @@ -246,14 +254,6 @@ "print(result)" ] }, - { - "cell_type": "markdown", - "id": "16642a48-1cab-40d2-af08-ab8c2fbf5876", - "metadata": {}, - "source": [ - "- First, let's try the API with a simple example to make sure it works as intended:" - ] - }, { "cell_type": "markdown", "id": "162a4739-6f03-4092-a5c2-f57a0b6a4c4d", @@ -571,7 +571,7 @@ "id": "b071ce84-1866-427f-a272-b46700f364b2", "metadata": {}, "source": [ - "- Let's now apply this evaluation to the whole dataset and compute the average score of each model (this takes about 1 minute per model on a M3 MacBook Air laptop)\n", + "- Let's now apply this evaluation to the whole dataset and compute the average score of each model (this takes about 1 minute per model on an M3 MacBook Air laptop)\n", "- Note that ollama is not fully deterministic (as of this writing) so the numbers you are getting might slightly differ from the ones shown below" ] }, @@ -666,7 +666,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.6" + "version": "3.11.4" } }, "nbformat": 4,
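- Because a judge model sometimes wraps the number in prose (as in the example outputs earlier), a small self-contained sketch of a tolerant score parser plus the averaging step is included below; the reply strings are fabricated, and the notebook itself simply asks the judge to respond with the integer only

```python
# Self-contained sketch: pull an integer score out of possibly verbose judge replies
# and average the usable ones; the reply strings below are fabricated examples.
import re


def parse_score(reply):
    match = re.search(r"\d+", reply)  # first integer in the reply, if any
    if match is None:
        return None
    score = int(match.group())
    return score if 0 <= score <= 100 else None


replies = ["85", "Score: **40 out of 100**.", "I cannot assign a score."]
scores = [s for s in (parse_score(r) for r in replies) if s is not None]

print(f"Number of scores: {len(scores)} of {len(replies)}")
print(f"Average score: {sum(scores)/len(scores):.2f}")
```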