diff --git "a/F_SciTLDR.ipynb" "b/F_SciTLDR.ipynb" new file mode 100644--- /dev/null +++ "b/F_SciTLDR.ipynb" @@ -0,0 +1,10560 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "231bbb9d", + "metadata": {}, + "source": [ + "## IMpoRT LiB" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "73f62ec7", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[nltk_data] Downloading package punkt to\n", + "[nltk_data] C:\\Users\\User\\AppData\\Roaming\\nltk_data...\n", + "[nltk_data] Package punkt is already up-to-date!\n" + ] + }, + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import json\n", + "import nltk\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "import re\n", + "\n", + "nltk.download(\"punkt\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f014c1c7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "ab72a075", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "markdown", + "id": "b74b5bdd", + "metadata": {}, + "source": [ + "## LoaD DataSeT\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2f97f234", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 176, + "id": "d2f6a241", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "SciTLDR_train = pd.read_json('SciTLDR_train.jsonl', lines=True)\n", + "SciTLDR_test = pd.read_json('SciTLDR_test.jsonl', lines=True)\n", + "SciTLDR_dev = pd.read_json('SciTLDR_dev.jsonl', lines=True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d5a933fe", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 177, + "id": "2fb93c3b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(1992, 6)" + ] + }, + "execution_count": 177, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "SciTLDR_train.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 178, + "id": "31b264ac", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(618, 6)" + ] + }, + "execution_count": 178, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "SciTLDR_test.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 179, + "id": "a55ad439", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(619, 6)" + ] + }, + "execution_count": 179, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "SciTLDR_dev.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5966a5b6", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "799116c4", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "markdown", + "id": "c8ff8f8c", + "metadata": {}, + "source": [ + " ## ReName ColuMnS" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9e8ffd3f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 180, + "id": "f2aad604", + "metadata": {}, + "outputs": [], + "source": [ + "N_SciTLDR_train = SciTLDR_train[['title','source','target']].copy()\n", + "N_SciTLDR_test = SciTLDR_test[['title','source','target']].copy()\n", + "N_SciTLDR_dev = 
SciTLDR_dev[['title','source','target']].copy()\n", + "\n", + "N_SciTLDR_train.rename(columns = {'source':'Text','target':'Summary','title':'Title'}, inplace = True)\n", + "N_SciTLDR_test.rename(columns = {'source':'Text','target':'Summary','title':'Title'}, inplace = True)\n", + "N_SciTLDR_dev.rename(columns = {'source':'Text','target':'Summary','title':'Title'}, inplace = True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dacedbe7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "7c667a01", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "480133f0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 181, + "id": "ddfa268b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RangeIndex: 1992 entries, 0 to 1991\n", + "Data columns (total 3 columns):\n", + " # Column Non-Null Count Dtype \n", + "--- ------ -------------- ----- \n", + " 0 Title 1992 non-null object\n", + " 1 Text 1992 non-null object\n", + " 2 Summary 1992 non-null object\n", + "dtypes: object(3)\n", + "memory usage: 46.8+ KB\n" + ] + } + ], + "source": [ + "N_SciTLDR_train.info()" + ] + }, + { + "cell_type": "code", + "execution_count": 182, + "id": "b81ecfbb", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RangeIndex: 618 entries, 0 to 617\n", + "Data columns (total 3 columns):\n", + " # Column Non-Null Count Dtype \n", + "--- ------ -------------- ----- \n", + " 0 Title 618 non-null object\n", + " 1 Text 618 non-null object\n", + " 2 Summary 618 non-null object\n", + "dtypes: object(3)\n", + "memory usage: 14.6+ KB\n" + ] + } + ], + "source": [ + "N_SciTLDR_test.info()" + ] + }, + { + "cell_type": "code", + "execution_count": 183, + "id": "a98a0c09", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RangeIndex: 619 entries, 0 to 618\n", + "Data columns (total 3 columns):\n", + " # Column Non-Null Count Dtype \n", + "--- ------ -------------- ----- \n", + " 0 Title 619 non-null object\n", + " 1 Text 619 non-null object\n", + " 2 Summary 619 non-null object\n", + "dtypes: object(3)\n", + "memory usage: 14.6+ KB\n" + ] + } + ], + "source": [ + "N_SciTLDR_dev.info()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4fdb5704", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 184, + "id": "6fa6c8f1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Title 0\n", + "Text 0\n", + "Summary 0\n", + "dtype: int64" + ] + }, + "execution_count": 184, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "N_SciTLDR_train.isnull().sum(axis=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 185, + "id": "ecbfc8a8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Title 0\n", + "Text 0\n", + "Summary 0\n", + "dtype: int64" + ] + }, + "execution_count": 185, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "N_SciTLDR_test.isnull().sum(axis=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 186, + "id": "a8f129a2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Title 0\n", + "Text 0\n", + "Summary 0\n", + "dtype: int64" + ] + }, + "execution_count": 186, + "metadata": {}, + 
"output_type": "execute_result" + } + ], + "source": [ + "N_SciTLDR_dev.isnull().sum(axis=0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2692269d", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "be63a507", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b93cecfa", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "eb7728ef", + "metadata": {}, + "source": [ + "## GeT DupLiCateS" + ] + }, + { + "cell_type": "markdown", + "id": "16d12cca", + "metadata": {}, + "source": [ + "### 1) SoLve ErrOr TO GeT DuP\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "82f1a9ec", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 187, + "id": "ac3cdc8e", + "metadata": {}, + "outputs": [], + "source": [ + " # unhashable type: 'list'\n", + " \n", + "N_SciTLDR_train['Clean_Text'] = N_SciTLDR_train['Text'].astype(str)\n", + "N_SciTLDR_train['Clean_Summary'] = N_SciTLDR_train['Summary'].astype(str)\n", + "N_SciTLDR_train['Clean_Title'] = N_SciTLDR_train['Title'].astype(str)\n", + "\n", + "N_SciTLDR_test['Clean_Text'] = N_SciTLDR_test['Text'].astype(str)\n", + "N_SciTLDR_test['Clean_Summary'] = N_SciTLDR_test['Summary'].astype(str)\n", + "N_SciTLDR_test['Clean_Title'] = N_SciTLDR_test['Title'].astype(str)\n", + "\n", + "N_SciTLDR_dev['Clean_Text'] = N_SciTLDR_dev['Text'].astype(str)\n", + "N_SciTLDR_dev['Clean_Summary'] = N_SciTLDR_dev['Summary'].astype(str)\n", + "N_SciTLDR_dev['Clean_Title'] = N_SciTLDR_dev['Title'].astype(str)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cdf3807b", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 188, + "id": "ba74bd33", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['Clean_Title', 'Clean_Text', 'Clean_Summary', 'L_Clean_Text',\n", + " 'L_Clean_Summary', 'L_Clean_Title'],\n", + " dtype='object')" + ] + }, + "execution_count": 188, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\n", + "NN_SciTLDR_train = N_SciTLDR_train[['Clean_Title','Clean_Text','Clean_Summary']].copy()\n", + "\n", + "NN_SciTLDR_train['L_Clean_Text'] = NN_SciTLDR_train['Clean_Text'].str.lower()\n", + "NN_SciTLDR_train['L_Clean_Summary']=NN_SciTLDR_train['Clean_Summary'].str.lower()\n", + "NN_SciTLDR_train['L_Clean_Title']=NN_SciTLDR_train['Clean_Title'].str.lower()\n", + "\n", + "\n", + "NN_SciTLDR_train.columns\n" + ] + }, + { + "cell_type": "code", + "execution_count": 189, + "id": "05b902f4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['Clean_Title', 'Clean_Text', 'Clean_Summary', 'L_Clean_Text',\n", + " 'L_Clean_Summary', 'L_Clean_Title'],\n", + " dtype='object')" + ] + }, + "execution_count": 189, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\n", + "NN_SciTLDR_test = N_SciTLDR_test[['Clean_Title','Clean_Text','Clean_Summary']].copy()\n", + "\n", + "\n", + "NN_SciTLDR_test['L_Clean_Text'] = NN_SciTLDR_test['Clean_Text'].str.lower()\n", + "NN_SciTLDR_test['L_Clean_Summary']=NN_SciTLDR_test['Clean_Summary'].str.lower()\n", + "NN_SciTLDR_test['L_Clean_Title']=NN_SciTLDR_test['Clean_Title'].str.lower()\n", + "\n", + "\n", + "NN_SciTLDR_test.columns\n" + ] + }, + { + "cell_type": "code", + "execution_count": 190, + "id": "fbef4777", + "metadata": 
{}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['Clean_Title', 'Clean_Text', 'Clean_Summary', 'L_Clean_Text',\n", + " 'L_Clean_Summary', 'L_Clean_Title'],\n", + " dtype='object')" + ] + }, + "execution_count": 190, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\n", + "NN_SciTLDR_dev = N_SciTLDR_dev[['Clean_Title','Clean_Text','Clean_Summary']].copy()\n", + "\n", + "\n", + "NN_SciTLDR_dev['L_Clean_Text'] = NN_SciTLDR_dev['Clean_Text'].str.lower()\n", + "NN_SciTLDR_dev['L_Clean_Summary']=NN_SciTLDR_dev['Clean_Summary'].str.lower()\n", + "NN_SciTLDR_dev['L_Clean_Title']=NN_SciTLDR_dev['Clean_Title'].str.lower()\n", + "\n", + "\n", + "\n", + "NN_SciTLDR_dev.columns\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ced6a51d", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "38698d10", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "markdown", + "id": "1b4b427c", + "metadata": {}, + "source": [ + "## 2) GeT DuP\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0085f71e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 191, + "id": "80c0e36e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
count199219921992199219921992
unique198319911990199119901982
topMinimal Random Code Learning: Getting Bits Bac...['We present SOSELETO (SOurce SELEction for Ta...['GAN representations are examined in detail, ...['we present soseleto (source selection for ta...['gan representations are examined in detail, ...structured prediction using cgans with fusion ...
freq222222
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "count 1992 \n", + "unique 1983 \n", + "top Minimal Random Code Learning: Getting Bits Bac... \n", + "freq 2 \n", + "\n", + " Clean_Text \\\n", + "count 1992 \n", + "unique 1991 \n", + "top ['We present SOSELETO (SOurce SELEction for Ta... \n", + "freq 2 \n", + "\n", + " Clean_Summary \\\n", + "count 1992 \n", + "unique 1990 \n", + "top ['GAN representations are examined in detail, ... \n", + "freq 2 \n", + "\n", + " L_Clean_Text \\\n", + "count 1992 \n", + "unique 1991 \n", + "top ['we present soseleto (source selection for ta... \n", + "freq 2 \n", + "\n", + " L_Clean_Summary \\\n", + "count 1992 \n", + "unique 1990 \n", + "top ['gan representations are examined in detail, ... \n", + "freq 2 \n", + "\n", + " L_Clean_Title \n", + "count 1992 \n", + "unique 1982 \n", + "top structured prediction using cgans with fusion ... \n", + "freq 2 " + ] + }, + "execution_count": 191, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.describe()\n", + "# Count-Unique = DuP" + ] + }, + { + "cell_type": "code", + "execution_count": 192, + "id": "8da63f32", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 192, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.duplicated().sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 193, + "id": "dca4e1ab", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "9" + ] + }, + "execution_count": 193, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.duplicated(subset=['Clean_Title']).sum()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 194, + "id": "412cb0f9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "10" + ] + }, + "execution_count": 194, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.duplicated(subset=['L_Clean_Title']).sum()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 195, + "id": "7e918e70", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 195, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.duplicated(subset=['Clean_Text']).sum()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 196, + "id": "d1e96d34", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 196, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.duplicated(subset=['L_Clean_Text']).sum()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 197, + "id": "22fb1070", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2" + ] + }, + "execution_count": 197, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.duplicated(subset=['Clean_Summary']).sum()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 198, + "id": "e68a1442", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2" + ] + }, + "execution_count": 198, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.duplicated(subset=['L_Clean_Summary']).sum()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fabfaebd", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 199, + 
"id": "56b17365", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
count618618618618618618
unique618618618618618618
topFearNet: Brain-Inspired Model for Incremental ...['Incremental class learning involves sequenti...['FearNet is a memory efficient neural-network...['incremental class learning involves sequenti...['fearnet is a memory efficient neural-network...fearnet: brain-inspired model for incremental ...
freq111111
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "count 618 \n", + "unique 618 \n", + "top FearNet: Brain-Inspired Model for Incremental ... \n", + "freq 1 \n", + "\n", + " Clean_Text \\\n", + "count 618 \n", + "unique 618 \n", + "top ['Incremental class learning involves sequenti... \n", + "freq 1 \n", + "\n", + " Clean_Summary \\\n", + "count 618 \n", + "unique 618 \n", + "top ['FearNet is a memory efficient neural-network... \n", + "freq 1 \n", + "\n", + " L_Clean_Text \\\n", + "count 618 \n", + "unique 618 \n", + "top ['incremental class learning involves sequenti... \n", + "freq 1 \n", + "\n", + " L_Clean_Summary \\\n", + "count 618 \n", + "unique 618 \n", + "top ['fearnet is a memory efficient neural-network... \n", + "freq 1 \n", + "\n", + " L_Clean_Title \n", + "count 618 \n", + "unique 618 \n", + "top fearnet: brain-inspired model for incremental ... \n", + "freq 1 " + ] + }, + "execution_count": 199, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": 200, + "id": "bc615e65", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 200, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.duplicated().sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 201, + "id": "b7b1963a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 201, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.duplicated(subset=['L_Clean_Title']).sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 202, + "id": "9047b49a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 202, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.duplicated(subset=['L_Clean_Text']).sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 203, + "id": "5f979786", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 203, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.duplicated(subset=['L_Clean_Summary']).sum()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9b4f6985", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 204, + "id": "7c8dd312", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
count619619619619619619
unique616618619618619616
topDoubly Sparse: Sparse Mixture of Sparse Expert...['Despite an ever growing literature on reinfo...['We devise adaptive loss scaling to improve m...['despite an ever growing literature on reinfo...['we devise adaptive loss scaling to improve m...doubly sparse: sparse mixture of sparse expert...
freq221212
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "count 619 \n", + "unique 616 \n", + "top Doubly Sparse: Sparse Mixture of Sparse Expert... \n", + "freq 2 \n", + "\n", + " Clean_Text \\\n", + "count 619 \n", + "unique 618 \n", + "top ['Despite an ever growing literature on reinfo... \n", + "freq 2 \n", + "\n", + " Clean_Summary \\\n", + "count 619 \n", + "unique 619 \n", + "top ['We devise adaptive loss scaling to improve m... \n", + "freq 1 \n", + "\n", + " L_Clean_Text \\\n", + "count 619 \n", + "unique 618 \n", + "top ['despite an ever growing literature on reinfo... \n", + "freq 2 \n", + "\n", + " L_Clean_Summary \\\n", + "count 619 \n", + "unique 619 \n", + "top ['we devise adaptive loss scaling to improve m... \n", + "freq 1 \n", + "\n", + " L_Clean_Title \n", + "count 619 \n", + "unique 616 \n", + "top doubly sparse: sparse mixture of sparse expert... \n", + "freq 2 " + ] + }, + "execution_count": 204, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": 205, + "id": "a32ee115", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 205, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.duplicated().sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 206, + "id": "935b2c3e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "3" + ] + }, + "execution_count": 206, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.duplicated(subset=['L_Clean_Title']).sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 207, + "id": "893ac748", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 207, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.duplicated(subset=['L_Clean_Text']).sum()" + ] + }, + { + "cell_type": "code", + "execution_count": 208, + "id": "2ac807db", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 208, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.duplicated(subset=['L_Clean_Summary']).sum()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbb9b843", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "4015d751", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "28ceffbe", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 209, + "id": "699b0418", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
254Generalized Transformation-based Gradient['The reparameterization trick has become one ...['We propose a novel generalized transformatio...['the reparameterization trick has become one ...['we propose a novel generalized transformatio...generalized transformation-based gradient
302Deep Imitative Models for Flexible Inference, ...['Imitation Learning (IL) is an appealing appr...['In this paper, we propose Imitative Models t...['imitation learning (il) is an appealing appr...['in this paper, we propose imitative models t...deep imitative models for flexible inference, ...
327Optimal transport maps for distribution preser...['Generative models such as Variational Auto E...['Operations in the GAN latent space can induc...['generative models such as variational auto e...['operations in the gan latent space can induc...optimal transport maps for distribution preser...
330Optimal Transport Maps For Distribution Preser...['Generative models such as Variational Auto E...['We propose a framework for modifying the lat...['generative models such as variational auto e...['we propose a framework for modifying the lat...optimal transport maps for distribution preser...
457Semi-supervised classification by reaching con...['Deep learning has demonstrated abilities to ...['TCN for multimodal semi-supervised learning ...['deep learning has demonstrated abilities to ...['tcn for multimodal semi-supervised learning ...semi-supervised classification by reaching con...
536Semi-supervised classification by reaching con...['We extend the Consensus Network framework to...['A semi-supervised multi-modal classification...['we extend the consensus network framework to...['a semi-supervised multi-modal classification...semi-supervised classification by reaching con...
886Minimal Random Code Learning: Getting Bits Bac...['While deep neural networks are a highly succ...['This paper proposes an effective method to c...['while deep neural networks are a highly succ...['this paper proposes an effective method to c...minimal random code learning: getting bits bac...
891Rethinking the Value of Network Pruning['Network pruning is widely used for reducing ...['In network pruning, fine-tuning a pruned mod...['network pruning is widely used for reducing ...['in network pruning, fine-tuning a pruned mod...rethinking the value of network pruning
915The Role of Embedding Complexity in Domain-inv...['Unsupervised domain adaptation aims to gener...[\"A general upper bound on the target domain's...['unsupervised domain adaptation aims to gener...[\"a general upper bound on the target domain's...the role of embedding complexity in domain-inv...
1048Structured Prediction using cGANs with Fusion ...['We propose a novel method for incorporating ...['We propose a novel way to incorporate condit...['we propose a novel method for incorporating ...['we propose a novel way to incorporate condit...structured prediction using cgans with fusion ...
1052Minimal Random Code Learning: Getting Bits Bac...['While deep neural networks are a highly succ...['This paper proposes an effective coding sche...['while deep neural networks are a highly succ...['this paper proposes an effective coding sche...minimal random code learning: getting bits bac...
1060SOSELETO: A Unified Approach to Transfer Learn...['We present SOSELETO (SOurce SELEction for Ta...['Learning with limited training data by explo...['we present soseleto (source selection for ta...['learning with limited training data by explo...soseleto: a unified approach to transfer learn...
1164Structured Prediction using cGANs with Fusion ...['We propose the fusion discriminator, a singl...['We propose the fusion discriminator, a novel...['we propose the fusion discriminator, a singl...['we propose the fusion discriminator, a novel...structured prediction using cgans with fusion ...
1176The Role of Embedding Complexity in Domain-inv...['Unsupervised domain adaptation aims to gener...['We study the effect of the embedding complex...['unsupervised domain adaptation aims to gener...['we study the effect of the embedding complex...the role of embedding complexity in domain-inv...
1451Batch Normalization is a Cause of Adversarial ...['Batch normalization (BN) is often used in an...['Batch normalization reduces robustness at te...['batch normalization (bn) is often used in an...['batch normalization reduces robustness at te...batch normalization is a cause of adversarial ...
1506Batch Normalization is a Cause of Adversarial ...['Batch normalization (batch norm) is often us...['Batch normalization reduces adversarial robu...['batch normalization (batch norm) is often us...['batch normalization reduces adversarial robu...batch normalization is a cause of adversarial ...
1522Deep Imitative Models for Flexible Inference, ...['Imitation learning provides an appealing fra...['Hybrid Vision-Driven Imitation Learning and ...['imitation learning provides an appealing fra...['hybrid vision-driven imitation learning and ...deep imitative models for flexible inference, ...
1531SOSELETO: A Unified Approach to Transfer Learn...['We present SOSELETO (SOurce SELEction for Ta...['Learning with limited training data by explo...['we present soseleto (source selection for ta...['learning with limited training data by explo...soseleto: a unified approach to transfer learn...
1551Generalized Transformation-based Gradient['The reparameterization trick has become one ...['a generalized transformation-based gradient ...['the reparameterization trick has become one ...['a generalized transformation-based gradient ...generalized transformation-based gradient
1889Rethinking the Value of Network Pruning['Network pruning is widely used for reducing ...['In structured network pruning, fine-tuning a...['network pruning is widely used for reducing ...['in structured network pruning, fine-tuning a...rethinking the value of network pruning
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "254 Generalized Transformation-based Gradient \n", + "302 Deep Imitative Models for Flexible Inference, ... \n", + "327 Optimal transport maps for distribution preser... \n", + "330 Optimal Transport Maps For Distribution Preser... \n", + "457 Semi-supervised classification by reaching con... \n", + "536 Semi-supervised classification by reaching con... \n", + "886 Minimal Random Code Learning: Getting Bits Bac... \n", + "891 Rethinking the Value of Network Pruning \n", + "915 The Role of Embedding Complexity in Domain-inv... \n", + "1048 Structured Prediction using cGANs with Fusion ... \n", + "1052 Minimal Random Code Learning: Getting Bits Bac... \n", + "1060 SOSELETO: A Unified Approach to Transfer Learn... \n", + "1164 Structured Prediction using cGANs with Fusion ... \n", + "1176 The Role of Embedding Complexity in Domain-inv... \n", + "1451 Batch Normalization is a Cause of Adversarial ... \n", + "1506 Batch Normalization is a Cause of Adversarial ... \n", + "1522 Deep Imitative Models for Flexible Inference, ... \n", + "1531 SOSELETO: A Unified Approach to Transfer Learn... \n", + "1551 Generalized Transformation-based Gradient \n", + "1889 Rethinking the Value of Network Pruning \n", + "\n", + " Clean_Text \\\n", + "254 ['The reparameterization trick has become one ... \n", + "302 ['Imitation Learning (IL) is an appealing appr... \n", + "327 ['Generative models such as Variational Auto E... \n", + "330 ['Generative models such as Variational Auto E... \n", + "457 ['Deep learning has demonstrated abilities to ... \n", + "536 ['We extend the Consensus Network framework to... \n", + "886 ['While deep neural networks are a highly succ... \n", + "891 ['Network pruning is widely used for reducing ... \n", + "915 ['Unsupervised domain adaptation aims to gener... \n", + "1048 ['We propose a novel method for incorporating ... \n", + "1052 ['While deep neural networks are a highly succ... \n", + "1060 ['We present SOSELETO (SOurce SELEction for Ta... \n", + "1164 ['We propose the fusion discriminator, a singl... \n", + "1176 ['Unsupervised domain adaptation aims to gener... \n", + "1451 ['Batch normalization (BN) is often used in an... \n", + "1506 ['Batch normalization (batch norm) is often us... \n", + "1522 ['Imitation learning provides an appealing fra... \n", + "1531 ['We present SOSELETO (SOurce SELEction for Ta... \n", + "1551 ['The reparameterization trick has become one ... \n", + "1889 ['Network pruning is widely used for reducing ... \n", + "\n", + " Clean_Summary \\\n", + "254 ['We propose a novel generalized transformatio... \n", + "302 ['In this paper, we propose Imitative Models t... \n", + "327 ['Operations in the GAN latent space can induc... \n", + "330 ['We propose a framework for modifying the lat... \n", + "457 ['TCN for multimodal semi-supervised learning ... \n", + "536 ['A semi-supervised multi-modal classification... \n", + "886 ['This paper proposes an effective method to c... \n", + "891 ['In network pruning, fine-tuning a pruned mod... \n", + "915 [\"A general upper bound on the target domain's... \n", + "1048 ['We propose a novel way to incorporate condit... \n", + "1052 ['This paper proposes an effective coding sche... \n", + "1060 ['Learning with limited training data by explo... \n", + "1164 ['We propose the fusion discriminator, a novel... \n", + "1176 ['We study the effect of the embedding complex... \n", + "1451 ['Batch normalization reduces robustness at te... 
\n", + "1506 ['Batch normalization reduces adversarial robu... \n", + "1522 ['Hybrid Vision-Driven Imitation Learning and ... \n", + "1531 ['Learning with limited training data by explo... \n", + "1551 ['a generalized transformation-based gradient ... \n", + "1889 ['In structured network pruning, fine-tuning a... \n", + "\n", + " L_Clean_Text \\\n", + "254 ['the reparameterization trick has become one ... \n", + "302 ['imitation learning (il) is an appealing appr... \n", + "327 ['generative models such as variational auto e... \n", + "330 ['generative models such as variational auto e... \n", + "457 ['deep learning has demonstrated abilities to ... \n", + "536 ['we extend the consensus network framework to... \n", + "886 ['while deep neural networks are a highly succ... \n", + "891 ['network pruning is widely used for reducing ... \n", + "915 ['unsupervised domain adaptation aims to gener... \n", + "1048 ['we propose a novel method for incorporating ... \n", + "1052 ['while deep neural networks are a highly succ... \n", + "1060 ['we present soseleto (source selection for ta... \n", + "1164 ['we propose the fusion discriminator, a singl... \n", + "1176 ['unsupervised domain adaptation aims to gener... \n", + "1451 ['batch normalization (bn) is often used in an... \n", + "1506 ['batch normalization (batch norm) is often us... \n", + "1522 ['imitation learning provides an appealing fra... \n", + "1531 ['we present soseleto (source selection for ta... \n", + "1551 ['the reparameterization trick has become one ... \n", + "1889 ['network pruning is widely used for reducing ... \n", + "\n", + " L_Clean_Summary \\\n", + "254 ['we propose a novel generalized transformatio... \n", + "302 ['in this paper, we propose imitative models t... \n", + "327 ['operations in the gan latent space can induc... \n", + "330 ['we propose a framework for modifying the lat... \n", + "457 ['tcn for multimodal semi-supervised learning ... \n", + "536 ['a semi-supervised multi-modal classification... \n", + "886 ['this paper proposes an effective method to c... \n", + "891 ['in network pruning, fine-tuning a pruned mod... \n", + "915 [\"a general upper bound on the target domain's... \n", + "1048 ['we propose a novel way to incorporate condit... \n", + "1052 ['this paper proposes an effective coding sche... \n", + "1060 ['learning with limited training data by explo... \n", + "1164 ['we propose the fusion discriminator, a novel... \n", + "1176 ['we study the effect of the embedding complex... \n", + "1451 ['batch normalization reduces robustness at te... \n", + "1506 ['batch normalization reduces adversarial robu... \n", + "1522 ['hybrid vision-driven imitation learning and ... \n", + "1531 ['learning with limited training data by explo... \n", + "1551 ['a generalized transformation-based gradient ... \n", + "1889 ['in structured network pruning, fine-tuning a... \n", + "\n", + " L_Clean_Title \n", + "254 generalized transformation-based gradient \n", + "302 deep imitative models for flexible inference, ... \n", + "327 optimal transport maps for distribution preser... \n", + "330 optimal transport maps for distribution preser... \n", + "457 semi-supervised classification by reaching con... \n", + "536 semi-supervised classification by reaching con... \n", + "886 minimal random code learning: getting bits bac... \n", + "891 rethinking the value of network pruning \n", + "915 the role of embedding complexity in domain-inv... \n", + "1048 structured prediction using cgans with fusion ... 
\n", + "1052 minimal random code learning: getting bits bac... \n", + "1060 soseleto: a unified approach to transfer learn... \n", + "1164 structured prediction using cgans with fusion ... \n", + "1176 the role of embedding complexity in domain-inv... \n", + "1451 batch normalization is a cause of adversarial ... \n", + "1506 batch normalization is a cause of adversarial ... \n", + "1522 deep imitative models for flexible inference, ... \n", + "1531 soseleto: a unified approach to transfer learn... \n", + "1551 generalized transformation-based gradient \n", + "1889 rethinking the value of network pruning " + ] + }, + "execution_count": 209, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Train_Title = NN_SciTLDR_train.duplicated(subset=['L_Clean_Title'], keep=False)\n", + "NN_SciTLDR_train[duplicate_Train_Title]" + ] + }, + { + "cell_type": "code", + "execution_count": 210, + "id": "7db7fcab", + "metadata": {}, + "outputs": [], + "source": [ + "DUP_TraiN = NN_SciTLDR_train[duplicate_Train_Title].index.values.tolist()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "818fa0ec", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 211, + "id": "7b9fc7b5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Optimal transport maps for distribution preserving operations on latent spaces of Generative Models'" + ] + }, + "execution_count": 211, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Title'].loc[327] # Lower Case TitLe" + ] + }, + { + "cell_type": "code", + "execution_count": 212, + "id": "2bdfb5bd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'optimal transport maps for distribution preserving operations on latent spaces of generative models'" + ] + }, + "execution_count": 212, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['L_Clean_Title'].loc[330] # Lower Case TitLe" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2416d3c4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 213, + "id": "b0e2d4b7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
1060SOSELETO: A Unified Approach to Transfer Learn...['We present SOSELETO (SOurce SELEction for Ta...['Learning with limited training data by explo...['we present soseleto (source selection for ta...['learning with limited training data by explo...soseleto: a unified approach to transfer learn...
1531SOSELETO: A Unified Approach to Transfer Learn...['We present SOSELETO (SOurce SELEction for Ta...['Learning with limited training data by explo...['we present soseleto (source selection for ta...['learning with limited training data by explo...soseleto: a unified approach to transfer learn...
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "1060 SOSELETO: A Unified Approach to Transfer Learn... \n", + "1531 SOSELETO: A Unified Approach to Transfer Learn... \n", + "\n", + " Clean_Text \\\n", + "1060 ['We present SOSELETO (SOurce SELEction for Ta... \n", + "1531 ['We present SOSELETO (SOurce SELEction for Ta... \n", + "\n", + " Clean_Summary \\\n", + "1060 ['Learning with limited training data by explo... \n", + "1531 ['Learning with limited training data by explo... \n", + "\n", + " L_Clean_Text \\\n", + "1060 ['we present soseleto (source selection for ta... \n", + "1531 ['we present soseleto (source selection for ta... \n", + "\n", + " L_Clean_Summary \\\n", + "1060 ['learning with limited training data by explo... \n", + "1531 ['learning with limited training data by explo... \n", + "\n", + " L_Clean_Title \n", + "1060 soseleto: a unified approach to transfer learn... \n", + "1531 soseleto: a unified approach to transfer learn... " + ] + }, + "execution_count": 213, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Train_Text = NN_SciTLDR_train.duplicated(subset=['L_Clean_Text'], keep=False)\n", + "NN_SciTLDR_train[duplicate_Train_Text]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "47214e3c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 214, + "id": "58602dae", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
824GAN Dissection: Visualizing and Understanding ...['Generative Adversarial Networks (GANs) have ...['GAN representations are examined in detail, ...['generative adversarial networks (gans) have ...['gan representations are examined in detail, ...gan dissection: visualizing and understanding ...
877Visualizing and Understanding GANs['We present an analytic framework to visualiz...['GAN representations are examined in detail, ...['we present an analytic framework to visualiz...['gan representations are examined in detail, ...visualizing and understanding gans
1060SOSELETO: A Unified Approach to Transfer Learn...['We present SOSELETO (SOurce SELEction for Ta...['Learning with limited training data by explo...['we present soseleto (source selection for ta...['learning with limited training data by explo...soseleto: a unified approach to transfer learn...
1531SOSELETO: A Unified Approach to Transfer Learn...['We present SOSELETO (SOurce SELEction for Ta...['Learning with limited training data by explo...['we present soseleto (source selection for ta...['learning with limited training data by explo...soseleto: a unified approach to transfer learn...
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "824 GAN Dissection: Visualizing and Understanding ... \n", + "877 Visualizing and Understanding GANs \n", + "1060 SOSELETO: A Unified Approach to Transfer Learn... \n", + "1531 SOSELETO: A Unified Approach to Transfer Learn... \n", + "\n", + " Clean_Text \\\n", + "824 ['Generative Adversarial Networks (GANs) have ... \n", + "877 ['We present an analytic framework to visualiz... \n", + "1060 ['We present SOSELETO (SOurce SELEction for Ta... \n", + "1531 ['We present SOSELETO (SOurce SELEction for Ta... \n", + "\n", + " Clean_Summary \\\n", + "824 ['GAN representations are examined in detail, ... \n", + "877 ['GAN representations are examined in detail, ... \n", + "1060 ['Learning with limited training data by explo... \n", + "1531 ['Learning with limited training data by explo... \n", + "\n", + " L_Clean_Text \\\n", + "824 ['generative adversarial networks (gans) have ... \n", + "877 ['we present an analytic framework to visualiz... \n", + "1060 ['we present soseleto (source selection for ta... \n", + "1531 ['we present soseleto (source selection for ta... \n", + "\n", + " L_Clean_Summary \\\n", + "824 ['gan representations are examined in detail, ... \n", + "877 ['gan representations are examined in detail, ... \n", + "1060 ['learning with limited training data by explo... \n", + "1531 ['learning with limited training data by explo... \n", + "\n", + " L_Clean_Title \n", + "824 gan dissection: visualizing and understanding ... \n", + "877 visualizing and understanding gans \n", + "1060 soseleto: a unified approach to transfer learn... \n", + "1531 soseleto: a unified approach to transfer learn... " + ] + }, + "execution_count": 214, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Train_SUM = NN_SciTLDR_train.duplicated(subset=['L_Clean_Summary'], keep=False)\n", + "NN_SciTLDR_train[duplicate_Train_SUM]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8a080ce3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 215, + "id": "b1511a7e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [Clean_Title, Clean_Text, Clean_Summary, L_Clean_Text, L_Clean_Summary, L_Clean_Title]\n", + "Index: []" + ] + }, + "execution_count": 215, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Test_Title = NN_SciTLDR_test.duplicated(subset=['L_Clean_Title'], keep=False)\n", + "NN_SciTLDR_test[duplicate_Test_Title]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3b3ee304", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 216, + "id": "a0aea7d5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [Clean_Title, Clean_Text, Clean_Summary, L_Clean_Text, L_Clean_Summary, L_Clean_Title]\n", + "Index: []" + ] + }, + "execution_count": 216, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Test_Text = NN_SciTLDR_test.duplicated(subset=['L_Clean_Text'], keep=False)\n", + "NN_SciTLDR_test[duplicate_Test_Text]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b7de8cc5", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 217, + "id": "41cf0f33", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [Clean_Title, Clean_Text, Clean_Summary, L_Clean_Text, L_Clean_Summary, L_Clean_Title]\n", + "Index: []" + ] + }, + "execution_count": 217, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Test_SUM = NN_SciTLDR_test.duplicated(subset=['L_Clean_Summary'], keep=False)\n", + "NN_SciTLDR_test[duplicate_Test_SUM]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8a8cf3dd", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 218, + "id": "a83d17b2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
59Doubly Sparse: Sparse Mixture of Sparse Expert...['Computations for the softmax function in neu...['We present doubly sparse softmax, the sparse...['computations for the softmax function in neu...['we present doubly sparse softmax, the sparse...doubly sparse: sparse mixture of sparse expert...
126Efficient Inference and Exploration for Reinfo...['Despite an ever growing literature on reinfo...['We investigate the large-sample behaviors of...['despite an ever growing literature on reinfo...['we investigate the large-sample behaviors of...efficient inference and exploration for reinfo...
145On the Sensitivity of Adversarial Robustness t...['Neural networks are vulnerable to small adve...['Robustness performance of PGD trained models...['neural networks are vulnerable to small adve...['robustness performance of pgd trained models...on the sensitivity of adversarial robustness t...
273On the Sensitivity of Adversarial Robustness t...['Neural networks are vulnerable to small adve...['Robustness performance of PGD trained models...['neural networks are vulnerable to small adve...['robustness performance of pgd trained models...on the sensitivity of adversarial robustness t...
427Doubly Sparse: Sparse Mixture of Sparse Expert...['Computations for the softmax function in neu...['We present doubly sparse softmax, the sparse...['computations for the softmax function in neu...['we present doubly sparse softmax, the sparse...doubly sparse: sparse mixture of sparse expert...
571Efficient Inference and Exploration for Reinfo...['Despite an ever growing literature on reinfo...['We investigate the large-sample behaviors of...['despite an ever growing literature on reinfo...['we investigate the large-sample behaviors of...efficient inference and exploration for reinfo...
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "59 Doubly Sparse: Sparse Mixture of Sparse Expert... \n", + "126 Efficient Inference and Exploration for Reinfo... \n", + "145 On the Sensitivity of Adversarial Robustness t... \n", + "273 On the Sensitivity of Adversarial Robustness t... \n", + "427 Doubly Sparse: Sparse Mixture of Sparse Expert... \n", + "571 Efficient Inference and Exploration for Reinfo... \n", + "\n", + " Clean_Text \\\n", + "59 ['Computations for the softmax function in neu... \n", + "126 ['Despite an ever growing literature on reinfo... \n", + "145 ['Neural networks are vulnerable to small adve... \n", + "273 ['Neural networks are vulnerable to small adve... \n", + "427 ['Computations for the softmax function in neu... \n", + "571 ['Despite an ever growing literature on reinfo... \n", + "\n", + " Clean_Summary \\\n", + "59 ['We present doubly sparse softmax, the sparse... \n", + "126 ['We investigate the large-sample behaviors of... \n", + "145 ['Robustness performance of PGD trained models... \n", + "273 ['Robustness performance of PGD trained models... \n", + "427 ['We present doubly sparse softmax, the sparse... \n", + "571 ['We investigate the large-sample behaviors of... \n", + "\n", + " L_Clean_Text \\\n", + "59 ['computations for the softmax function in neu... \n", + "126 ['despite an ever growing literature on reinfo... \n", + "145 ['neural networks are vulnerable to small adve... \n", + "273 ['neural networks are vulnerable to small adve... \n", + "427 ['computations for the softmax function in neu... \n", + "571 ['despite an ever growing literature on reinfo... \n", + "\n", + " L_Clean_Summary \\\n", + "59 ['we present doubly sparse softmax, the sparse... \n", + "126 ['we investigate the large-sample behaviors of... \n", + "145 ['robustness performance of pgd trained models... \n", + "273 ['robustness performance of pgd trained models... \n", + "427 ['we present doubly sparse softmax, the sparse... \n", + "571 ['we investigate the large-sample behaviors of... \n", + "\n", + " L_Clean_Title \n", + "59 doubly sparse: sparse mixture of sparse expert... \n", + "126 efficient inference and exploration for reinfo... \n", + "145 on the sensitivity of adversarial robustness t... \n", + "273 on the sensitivity of adversarial robustness t... \n", + "427 doubly sparse: sparse mixture of sparse expert... \n", + "571 efficient inference and exploration for reinfo... " + ] + }, + "execution_count": 218, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Val_Title = NN_SciTLDR_dev.duplicated(subset=['L_Clean_Title'], keep=False)\n", + "NN_SciTLDR_dev[duplicate_Val_Title]" + ] + }, + { + "cell_type": "code", + "execution_count": 219, + "id": "aa17873a", + "metadata": {}, + "outputs": [], + "source": [ + "DUP_VaL = NN_SciTLDR_dev[duplicate_Val_Title].index.values.tolist()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4d2b21ea", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 220, + "id": "a53688af", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
126Efficient Inference and Exploration for Reinfo...['Despite an ever growing literature on reinfo...['We investigate the large-sample behaviors of...['despite an ever growing literature on reinfo...['we investigate the large-sample behaviors of...efficient inference and exploration for reinfo...
571Efficient Inference and Exploration for Reinfo...['Despite an ever growing literature on reinfo...['We investigate the large-sample behaviors of...['despite an ever growing literature on reinfo...['we investigate the large-sample behaviors of...efficient inference and exploration for reinfo...
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "126 Efficient Inference and Exploration for Reinfo... \n", + "571 Efficient Inference and Exploration for Reinfo... \n", + "\n", + " Clean_Text \\\n", + "126 ['Despite an ever growing literature on reinfo... \n", + "571 ['Despite an ever growing literature on reinfo... \n", + "\n", + " Clean_Summary \\\n", + "126 ['We investigate the large-sample behaviors of... \n", + "571 ['We investigate the large-sample behaviors of... \n", + "\n", + " L_Clean_Text \\\n", + "126 ['despite an ever growing literature on reinfo... \n", + "571 ['despite an ever growing literature on reinfo... \n", + "\n", + " L_Clean_Summary \\\n", + "126 ['we investigate the large-sample behaviors of... \n", + "571 ['we investigate the large-sample behaviors of... \n", + "\n", + " L_Clean_Title \n", + "126 efficient inference and exploration for reinfo... \n", + "571 efficient inference and exploration for reinfo... " + ] + }, + "execution_count": 220, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Val_Text = NN_SciTLDR_dev.duplicated(subset=['L_Clean_Text'], keep=False)\n", + "NN_SciTLDR_dev[duplicate_Val_Text]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5bfd64b7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 221, + "id": "ee2a5d5a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_Title
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [Clean_Title, Clean_Text, Clean_Summary, L_Clean_Text, L_Clean_Summary, L_Clean_Title]\n", + "Index: []" + ] + }, + "execution_count": 221, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "duplicate_Val_SUM = NN_SciTLDR_dev.duplicated(subset=['L_Clean_Summary'], keep=False)\n", + "NN_SciTLDR_dev[duplicate_Val_SUM]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab659820", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "85ffcfc9", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0057baa6", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "3e0d5086", + "metadata": {}, + "source": [ + "## Drop DupLicaTe" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "28586bf2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 222, + "id": "27d75366", + "metadata": {}, + "outputs": [], + "source": [ + "# list of indexes or row numbers\n", + "\n", + "NN_SciTLDR_train = NN_SciTLDR_train.drop(DUP_TraiN)\n", + "NN_SciTLDR_train = NN_SciTLDR_train.drop([824,877])\n", + "\n", + "\n", + "NN_SciTLDR_dev = NN_SciTLDR_dev.drop(DUP_VaL)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "749d85e3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 223, + "id": "ee9d1c93", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(1970, 6)" + ] + }, + "execution_count": 223, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 224, + "id": "6488d9cb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(618, 6)" + ] + }, + "execution_count": 224, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 225, + "id": "58e68c54", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(613, 6)" + ] + }, + "execution_count": 225, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8532516e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "cc887542", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "57b79530", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "d1d39ad2", + "metadata": {}, + "source": [ + "### NN_SciTLDR_train" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ffd90adc", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 226, + "id": "5663fd67", + "metadata": {}, + "outputs": [], + "source": [ + "# pattern = re.compile(r'\\\\\\\\')\n", + "# df0 = NN_SciTLDR_train[NN_SciTLDR_train['Clean_Text'].str.contains(pattern)]\n", + "# df0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3773e1e5", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "4ab2df85", + "metadata": {}, + "source": [ + "r'\\([^)]*\\)'" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "ad5aa436", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 149, + "id": "f271feb4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Recent efforts on combining deep models with probabilistic graphical models are promising in providing flexible models that are also easy to interpret.', 'We propose a variational message-passing algorithm for variational inference in such models.', 'We make three contributions.', 'First, we propose structured inference networks that incorporate the structure of the graphical model in the inference network of variational auto-encoders (VAE).', 'Second, we establish conditions under which such inference networks enable fast amortized inference similar to VAE.', 'Finally, we derive a variational message passing algorithm to perform efficient natural-gradient inference while retaining the efficiency of the amortized inference.', 'By simultaneously enabling structured, amortized, and natural-gradient inference for deep structured models, our method simplifies and generalizes existing methods.']\"" + ] + }, + "execution_count": 149, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[4]" + ] + }, + { + "cell_type": "code", + "execution_count": 294, + "id": "57817580", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Recent efforts on combining deep models with probabilistic graphical models are promising in providing flexible models that are also easy to interpret.We propose a variational message-passing algorithm for variational inference in such models.We make three contributions.First, we propose structured inference networks that incorporate the structure of the graphical model in the inference network of variational auto-encoders.Second, we establish conditions under which such inference networks enable fast amortized inference similar to VAE.Finally, we derive a variational message passing algorithm to perform efficient natural-gradient inference while retaining the efficiency of the amortized inference.By simultaneously enabling structured, amortized, and natural-gradient inference for deep structured models, our method simplifies and generalizes existing methods.'" + ] + }, + "execution_count": 294, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[4]" + ] + }, + { + "cell_type": "code", + "execution_count": 151, + "id": "a2ddd005", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Determining the appropriate batch size for mini-batch gradient descent is always time consuming as it often relies on grid search.', 'This paper considers a resizable mini-batch gradient descent (RMGD) algorithm based on a multi-armed bandit that achieves performance equivalent to that of best fixed batch-size.', 'At each epoch, the RMGD samples a batch size according to a certain probability distribution proportional to a batch being successful in reducing the loss function.', 'Sampling from this probability provides a mechanism for exploring different batch size and exploiting batch sizes with history of success. 
', 'After obtaining the validation loss at each epoch with the sampled batch size, the probability distribution is updated to incorporate the effectiveness of the sampled batch size.', 'Experimental results show that the RMGD achieves performance better than the best performing single batch size.', 'It is surprising that the RMGD achieves better performance than grid search.', 'Furthermore, it attains this performance in a shorter amount of time than grid search.']\"" + ] + }, + "execution_count": 151, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1984]" + ] + }, + { + "cell_type": "code", + "execution_count": 295, + "id": "a9b1a07e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Determining the appropriate batch size for mini-batch gradient descent is always time consuming as it often relies on grid search.This paper considers a resizable mini-batch gradient descent algorithm based on a multi-armed bandit that achieves performance equivalent to that of best fixed batch-size.At each epoch, the RMGD samples a batch size according to a certain probability distribution proportional to a batch being successful in reducing the loss function.Sampling from this probability provides a mechanism for exploring different batch size and exploiting batch sizes with history of success. After obtaining the validation loss at each epoch with the sampled batch size, the probability distribution is updated to incorporate the effectiveness of the sampled batch size.Experimental results show that the RMGD achieves performance better than the best performing single batch size.It is surprising that the RMGD achieves better performance than grid search.Furthermore, it attains this performance in a shorter amount of time than grid search.'" + ] + }, + "execution_count": 295, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1984]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c620eebe", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "44ed4520", + "metadata": {}, + "source": [ + "r'\\{[^}]*\\}'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d753fedc", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 153, + "id": "85599870", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Batch Normalization (BN) and its variants have seen widespread adoption in the deep learning community because they improve the training of deep neural networks.', 'Discussions of why this normalization works so well remain unsettled. 
', 'We make explicit the relationship between ordinary least squares and partial derivatives computed when back-propagating through BN.', 'We recast the back-propagation of BN as a least squares fit, which zero-centers and decorrelates partial derivatives from normalized activations.', 'This view, which we term {\\\\\\\\em gradient-least-squares}, is an extensible and arithmetically accurate description of BN.', 'To further explore this perspective, we motivate, interpret, and evaluate two adjustments to BN.']\"" + ] + }, + "execution_count": 153, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[35]" + ] + }, + { + "cell_type": "code", + "execution_count": 296, + "id": "dce9fffb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Batch Normalization and its variants have seen widespread adoption in the deep learning community because they improve the training of deep neural networks.Discussions of why this normalization works so well remain unsettled. We make explicit the relationship between ordinary least squares and partial derivatives computed when back-propagating through BN.We recast the back-propagation of BN as a least squares fit, which zero-centers and decorrelates partial derivatives from normalized activations.This view, which we term, is an extensible and arithmetically accurate description of BN.To further explore this perspective, we motivate, interpret, and evaluate two adjustments to BN.'" + ] + }, + "execution_count": 296, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[35]" + ] + }, + { + "cell_type": "code", + "execution_count": 155, + "id": "2be45510", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Compression is a key step to deploy large neural networks on resource-constrained platforms.', 'As a popular compression technique, quantization constrains the number of distinct weight values and thus reducing the number of bits required to represent and store each weight.', 'In this paper, we study the representation power of quantized neural networks.', 'First, we prove the universal approximability of quantized ReLU networks on a wide class of functions.', 'Then we provide upper bounds on the number of weights and the memory size for a given approximation error bound and the bit-width of weights for function-independent and function-dependent structures.', 'Our results reveal that, to attain an approximation error bound of $\\\\\\\\epsilon$, the number of weights needed by a quantized network is no more than $\\\\\\\\mathcal{O}\\\\\\\\left(\\\\\\\\log^5(1/\\\\\\\\epsilon)\\\\\\\\right)$ times that of an unquantized network.', 'This overhead is of much lower order than the lower bound of the number of weights needed for the error bound, supporting the empirical success of various quantization techniques.', 'To the best of our knowledge, this is the first in-depth study on the complexity bounds of quantized neural networks.']\"" + ] + }, + "execution_count": 155, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[98]" + ] + }, + { + "cell_type": "code", + "execution_count": 297, + "id": "ca975509", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Compression is a key step to deploy large neural networks on resource-constrained platforms.As a popular compression technique, quantization constrains the number of distinct weight values and 
thus reducing the number of bits required to represent and store each weight.In this paper, we study the representation power of quantized neural networks.First, we prove the universal approximability of quantized ReLU networks on a wide class of functions.Then we provide upper bounds on the number of weights and the memory size for a given approximation error bound and the bit-width of weights for function-independent and function-dependent structures.Our results reveal that, to attain an approximation error bound of, the number of weights needed by a quantized network is no more than times that of an unquantized network.This overhead is of much lower order than the lower bound of the number of weights needed for the error bound, supporting the empirical success of various quantization techniques.To the best of our knowledge, this is the first in-depth study on the complexity bounds of quantized neural networks.'" + ] + }, + "execution_count": 297, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[98]" + ] + }, + { + "cell_type": "code", + "execution_count": 157, + "id": "4bd62a87", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'Current practice in machine learning is to employ deep nets in an overparametrized limit, with the nominal number of parameters typically exceeding the number of measurements.\\', \\'This resembles the situation in compressed sensing, or in sparse regression with $l_1$ penalty terms, and provides a theoretical avenue for understanding phenomena that arise in the context of deep nets.\\', \\'One such phenonemon is the success of deep nets in providing good generalization in an interpolating regime with zero training error.\\', \\'Traditional statistical practice calls for regularization or smoothing to prevent \"overfitting\" (poor generalization performance).\\', \\'However, recent work shows that there exist data interpolation procedures which are statistically consistent and provide good generalization performance\\\\\\\\cite{belkin2018overfitting} (\"perfect fitting\").\\', \\'In this context, it has been suggested that \"classical\" and \"modern\" regimes for machine learning are separated by a peak in the generalization error (\"risk\") curve, a phenomenon dubbed \"double descent\"\\\\\\\\cite{belkin2019reconciling}.\\', \\'While such overfitting peaks do exist and arise from ill-conditioned design matrices, here we challenge the interpretation of the overfitting peak as demarcating the regime where good generalization occurs under overparametrization. 
\\\\n\\\\n\\', \\'We propose a model of Misparamatrized Sparse Regression (MiSpaR) and analytically compute the GE curves for $l_2$ and $l_1$ penalties.\\', \\'We show that the overfitting peak arising in the interpolation limit is dissociated from the regime of good generalization.\\', \\'The analytical expressions are obtained in the so called \"thermodynamic\" limit.\\', \\'We find an additional interesting phenomenon: increasing overparametrization in the fitting model increases sparsity, which should intuitively improve performance of $l_1$ penalized regression.\\', \\'However, at the same time, the relative number of measurements decrease compared to the number of fitting parameters, and eventually overparametrization does lead to poor generalization.\\', \\'Nevertheless, $l_1$ penalized regression can show good generalization performance under conditions of data interpolation even with a large amount of overparametrization.\\', \\'These results provide a theoretical avenue into studying inverse problems in the interpolating regime using overparametrized fitting functions such as deep nets.\\']'" + ] + }, + "execution_count": 157, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1982]" + ] + }, + { + "cell_type": "code", + "execution_count": 298, + "id": "a0e273fe", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Current practice in machine learning is to employ deep nets in an overparametrized limit, with the nominal number of parameters typically exceeding the number of measurements.This resembles the situation in compressed sensing, or in sparse regression with penalty terms, and provides a theoretical avenue for understanding phenomena that arise in the context of deep nets.One such phenonemon is the success of deep nets in providing good generalization in an interpolating regime with zero training error.Traditional statistical practice calls for regularization or smoothing to prevent \"overfitting\".However, recent work shows that there exist data interpolation procedures which are statistically consistent and provide good generalization performance.In this context, it has been suggested that \"classical\" and \"modern\" regimes for machine learning are separated by a peak in the generalization error curve, a phenomenon dubbed \"double descent\".While such overfitting peaks do exist and arise from ill-conditioned design matrices, here we challenge the interpretation of the overfitting peak as demarcating the regime where good generalization occurs under overparametrization.We propose a model of Misparamatrized Sparse Regression and analytically compute the GE curves for and penalties.We show that the overfitting peak arising in the interpolation limit is dissociated from the regime of good generalization.The analytical expressions are obtained in the so called \"thermodynamic\" limit.We find an additional interesting phenomenon: increasing overparametrization in the fitting model increases sparsity, which should intuitively improve performance of penalized regression.However, at the same time, the relative number of measurements decrease compared to the number of fitting parameters, and eventually overparametrization does lead to poor generalization.Nevertheless, penalized regression can show good generalization performance under conditions of data interpolation even with a large amount of overparametrization.These results provide a theoretical avenue into studying inverse problems in the 
interpolating regime using overparametrized fitting functions such as deep nets.'" + ] + }, + "execution_count": 298, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1982]" + ] + }, + { + "cell_type": "code", + "execution_count": 159, + "id": "ff4783b3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['The ability to autonomously explore and navigate a physical space is a fundamental requirement for virtually any mobile autonomous agent, from household robotic vacuums to autonomous vehicles.', 'Traditional SLAM-based approaches for exploration and navigation largely focus on leveraging scene geometry, but fail to model dynamic objects (such as other agents) or semantic constraints (such as wet floors or doorways).', 'Learning-based RL agents are an attractive alternative because they can incorporate both semantic and geometric information, but are notoriously sample inefficient, difficult to generalize to novel settings, and are difficult to interpret.', 'In this paper, we combine the best of both worlds with a modular approach that {\\\\\\\\em learns} a spatial representation of a scene that is trained to be effective when coupled with traditional geometric planners.', 'Specifically, we design an agent that learns to predict a spatial affordance map that elucidates what parts of a scene are navigable through active self-supervised experience gathering.', 'In contrast to most simulation environments that assume a static world, we evaluate our approach in the VizDoom simulator, using large-scale randomly-generated maps containing a variety of dynamic actors and hazards.', 'We show that learned affordance maps can be used to augment traditional approaches for both exploration and navigation, providing significant improvements in performance.']\"" + ] + }, + "execution_count": 159, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1991]" + ] + }, + { + "cell_type": "code", + "execution_count": 299, + "id": "250122a6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'The ability to autonomously explore and navigate a physical space is a fundamental requirement for virtually any mobile autonomous agent, from household robotic vacuums to autonomous vehicles.Traditional SLAM-based approaches for exploration and navigation largely focus on leveraging scene geometry, but fail to model dynamic objects or semantic constraints.Learning-based RL agents are an attractive alternative because they can incorporate both semantic and geometric information, but are notoriously sample inefficient, difficult to generalize to novel settings, and are difficult to interpret.In this paper, we combine the best of both worlds with a modular approach that a spatial representation of a scene that is trained to be effective when coupled with traditional geometric planners.Specifically, we design an agent that learns to predict a spatial affordance map that elucidates what parts of a scene are navigable through active self-supervised experience gathering.In contrast to most simulation environments that assume a static world, we evaluate our approach in the VizDoom simulator, using large-scale randomly-generated maps containing a variety of dynamic actors and hazards.We show that learned affordance maps can be used to augment traditional approaches for both exploration and navigation, providing significant improvements in performance.'" + ] + }, + 
"execution_count": 299, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1991]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e7c874fd", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "7e1a730a", + "metadata": {}, + "source": [ + "r'\\$[^$]*\\$'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2b7eed8a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 161, + "id": "64261fcd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Neural networks with low-precision weights and activations offer compelling\\\\n', 'efficiency advantages over their full-precision equivalents.', 'The two most\\\\n', 'frequently discussed benefits of quantization are reduced memory consumption,\\\\n', 'and a faster forward pass when implemented with efficient bitwise\\\\n', 'operations.', 'We propose a third benefit of very low-precision neural networks:\\\\n', 'improved robustness against some adversarial attacks, and in the worst case,\\\\n', 'performance that is on par with full-precision models.', 'We focus on the very\\\\n', 'low-precision case where weights and activations are both quantized to $\\\\\\\\pm$1,\\\\n', 'and note that stochastically quantizing weights in just one layer can sharply\\\\n', 'reduce the impact of iterative attacks.', 'We observe that non-scaled binary neural\\\\n', 'networks exhibit a similar effect to the original \\\\\\\\emph{defensive distillation}\\\\n', 'procedure that led to \\\\\\\\emph{gradient masking}, and a false notion of security.\\\\n', 'We address this by conducting both black-box and white-box experiments with\\\\n', 'binary models that do not artificially mask gradients.']\"" + ] + }, + "execution_count": 161, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[117]" + ] + }, + { + "cell_type": "code", + "execution_count": 300, + "id": "7d20c206", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Neural networks with low-precision weights and activations offer compellingefficiency advantages over their full-precision equivalents.The two mostfrequently discussed benefits of quantization are reduced memory consumption,and a faster forward pass when implemented with efficient bitwiseoperations.We propose a third benefit of very low-precision neural networks:improved robustness against some adversarial attacks, and in the worst case,performance that is on par with full-precision models.We focus on the verylow-precision case where weights and activations are both quantized to1,and note that stochastically quantizing weights in just one layer can sharplyreduce the impact of iterative attacks.We observe that non-scaled binary neuralnetworks exhibit a similar effect to the original procedure that led to , and a false notion of security.We address this by conducting both black-box and white-box experiments withbinary models that do not artificially mask gradients.'" + ] + }, + "execution_count": 300, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[117] " + ] + }, + { + "cell_type": "code", + "execution_count": 163, + "id": "9e323e1a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'In this paper we investigate the family of functions representable by deep neural networks (DNN) with rectified 
linear units (ReLU).\\', \\'We give an algorithm to train a ReLU DNN with one hidden layer to {\\\\\\\\em global optimality} with runtime polynomial in the data size albeit exponential in the input dimension.\\', \\'Further, we improve on the known lower bounds on size (from exponential to super exponential) for approximating a ReLU deep net function by a shallower ReLU net.\\', \"Our gap theorems hold for smoothly parametrized families of ``hard\\'\\' functions, contrary to countable, discrete families known in the literature. \", \\'An example consequence of our gap theorems is the following: for every natural number $k$ there exists a function representable by a ReLU DNN with $k^2$ hidden layers and total size $k^3$, such that any ReLU DNN with at most $k$ hidden layers will require at least $\\\\\\\\frac12k^{k+1}-1$ total nodes.\\', \\'Finally, for the family of $\\\\\\\\R^n\\\\\\\\to \\\\\\\\R$ DNNs with ReLU activations, we show a new lowerbound on the number of affine pieces, which is larger than previous constructions in certain regimes of the network architecture and most distinctively our lowerbound is demonstrated by an explicit construction of a \\\\\\\\emph{smoothly parameterized} family of functions attaining this scaling.\\', \\'Our construction utilizes the theory of zonotopes from polyhedral theory.\\']'" + ] + }, + "execution_count": 163, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[131]" + ] + }, + { + "cell_type": "code", + "execution_count": 306, + "id": "4230ce21", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'In this paper we investigate the family of functions representable by deep neural networks with rectified linear units.We give an algorithm to train a ReLU DNN with one hidden layer to with runtime polynomial in the data size albeit exponential in the input dimension.Further, we improve on the known lower bounds on size for approximating a ReLU deep net function by a shallower ReLU net.\"Our gap theorems hold for smoothly parametrized families of hard functions, contrary to countable, discrete families known in the literature.An example consequence of our gap theorems is the following: for every natural number there exists a function representable by a ReLU DNN with hidden layers and total size, such that any ReLU DNN with at most hidden layers will require at least total nodes.Finally, for the family of DNNs with ReLU activations, we show a new lowerbound on the number of affine pieces, which is larger than previous constructions in certain regimes of the network architecture and most distinctively our lowerbound is demonstrated by an explicit construction of a family of functions attaining this scaling.Our construction utilizes the theory of zonotopes from polyhedral theory.'" + ] + }, + "execution_count": 306, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[131] " + ] + }, + { + "cell_type": "code", + "execution_count": 165, + "id": "99e83795", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Given samples from a group of related regression tasks, a data-enriched model describes observations by a common and per-group individual parameters.', 'In high-dimensional regime, each parameter has its own structure such as sparsity or group sparsity.', 'In this paper, we consider the general form of data enrichment where data comes in a fixed but arbitrary number of tasks $G$ and any convex function, 
e.g., norm, can characterize the structure of both common and individual parameters. \\\\t', 'We propose an estimator for the high-dimensional data enriched model and investigate its statistical properties. ', 'We delineate the sample complexity of our estimator and provide high probability non-asymptotic bound for estimation error of all parameters under a condition weaker than the state-of-the-art.', 'We propose an iterative estimation algorithm with a geometric convergence rate.', 'Overall, we present a first through statistical and computational analysis of inference in the data enriched model. \\\\n\\\\t']\"" + ] + }, + "execution_count": 165, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1924]" + ] + }, + { + "cell_type": "code", + "execution_count": 307, + "id": "207c4513", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Given samples from a group of related regression tasks, a data-enriched model describes observations by a common and per-group individual parameters.In high-dimensional regime, each parameter has its own structure such as sparsity or group sparsity.In this paper, we consider the general form of data enrichment where data comes in a fixed but arbitrary number of tasks and any convex function, e.g., norm, can characterize the structure of both common and individual parameters. We propose an estimator for the high-dimensional data enriched model and investigate its statistical properties. We delineate the sample complexity of our estimator and provide high probability non-asymptotic bound for estimation error of all parameters under a condition weaker than the state-of-the-art.We propose an iterative estimation algorithm with a geometric convergence rate.Overall, we present a first through statistical and computational analysis of inference in the data enriched model.'" + ] + }, + "execution_count": 307, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1924] " + ] + }, + { + "cell_type": "code", + "execution_count": 167, + "id": "bc0d0a3f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Open-domain dialogue generation has gained increasing attention in Natural Language Processing.', 'Comparing these methods requires a holistic means of dialogue evaluation.', 'Human ratings are deemed as the gold standard.', 'As human evaluation is inefficient and costly, an automated substitute is desirable.', 'In this paper, we propose holistic evaluation metrics which capture both the quality and diversity of dialogues.', 'Our metrics consists of (1) GPT-2 based context coherence between sentences in a dialogue, (2) GPT-2 based fluency in phrasing, and, (3) $n$-gram based diversity in responses to augmented queries.', 'The empirical validity of our metrics is demonstrated by strong correlation with human judgments.', 'We provide the associated code, datasets and human ratings.']\"" + ] + }, + "execution_count": 167, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1975]" + ] + }, + { + "cell_type": "code", + "execution_count": 308, + "id": "13ad8a59", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Open-domain dialogue generation has gained increasing attention in Natural Language Processing.Comparing these methods requires a holistic means of dialogue evaluation.Human ratings are deemed as the gold standard.As human evaluation is 
inefficient and costly, an automated substitute is desirable.In this paper, we propose holistic evaluation metrics which capture both the quality and diversity of dialogues.Our metrics consists of GPT-2 based context coherence between sentences in a dialogue, GPT-2 based fluency in phrasing, and,-gram based diversity in responses to augmented queries.The empirical validity of our metrics is demonstrated by strong correlation with human judgments.We provide the associated code, datasets and human ratings.'" + ] + }, + "execution_count": 308, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1975]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9df0fc97", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "f48d8ad0", + "metadata": {}, + "source": [ + "\\\\\\\\" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "82670548", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 169, + "id": "1a6049e1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['In this work we explore a straightforward variational Bayes scheme for Recurrent Neural Networks.\\\\n', 'Firstly, we show that a simple adaptation of truncated backpropagation through time can yield good quality uncertainty estimates and superior regularisation at only a small extra computational cost during training, also reducing the amount of parameters by 80\\\\\\\\%.\\\\n', 'Secondly, we demonstrate how a novel kind of posterior approximation yields further improvements to the performance of Bayesian RNNs.', 'We incorporate local gradient information into the approximate posterior to sharpen it around the current batch statistics.', 'We show how this technique is not exclusive to recurrent neural networks and can be applied more widely to train Bayesian neural networks.\\\\n', 'We also empirically demonstrate how Bayesian RNNs are superior to traditional RNNs on a language modelling benchmark and an image captioning task, as well as showing how each of these methods improve our model over a variety of other schemes for training them.', 'We also introduce a new benchmark for studying uncertainty for language models so future methods can be easily compared.']\"" + ] + }, + "execution_count": 169, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[63]" + ] + }, + { + "cell_type": "code", + "execution_count": 314, + "id": "872102b6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'In this work we explore a straightforward variational Bayes scheme for Recurrent Neural Networks.Firstly, we show that a simple adaptation of truncated backpropagation through time can yield good quality uncertainty estimates and superior regularisation at only a small extra computational cost during training, also reducing the amount of parameters by 80%.Secondly, we demonstrate how a novel kind of posterior approximation yields further improvements to the performance of Bayesian RNNs.We incorporate local gradient information into the approximate posterior to sharpen it around the current batch statistics.We show how this technique is not exclusive to recurrent neural networks and can be applied more widely to train Bayesian neural networks.We also empirically demonstrate how Bayesian RNNs are superior to traditional RNNs on a language modelling benchmark and an image captioning 
task, as well as showing how each of these methods improve our model over a variety of other schemes for training them.We also introduce a new benchmark for studying uncertainty for language models so future methods can be easily compared.'" + ] + }, + "execution_count": 314, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[63]" + ] + }, + { + "cell_type": "code", + "execution_count": 171, + "id": "9128648f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['In this paper, we first identify \\\\\\\\textit{angle bias}, a simple but remarkable phenomenon that causes the vanishing gradient problem in a multilayer perceptron (MLP) with sigmoid activation functions.', 'We then propose \\\\\\\\textit{linearly constrained weights (LCW)} to reduce the angle bias in a neural network, so as to train the network under the constraints that the sum of the elements of each weight vector is zero.', 'A reparameterization technique is presented to efficiently train a model with LCW by embedding the constraints on weight vectors into the structure of the network.', 'Interestingly, batch normalization (Ioffe & Szegedy, 2015) can be viewed as a mechanism to correct angle bias.', 'Preliminary experiments show that LCW helps train a 100-layered MLP more efficiently than does batch normalization.']\"" + ] + }, + "execution_count": 171, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1951]" + ] + }, + { + "cell_type": "code", + "execution_count": 317, + "id": "968790c6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'In this paper, we first identify , a simple but remarkable phenomenon that causes the vanishing gradient problem in a multilayer perceptron with sigmoid activation functions.We then propose to reduce the angle bias in a neural network, so as to train the network under the constraints that the sum of the elements of each weight vector is zero.A reparameterization technique is presented to efficiently train a model with LCW by embedding the constraints on weight vectors into the structure of the network.Interestingly, batch normalization can be viewed as a mechanism to correct angle bias.Preliminary experiments show that LCW helps train a 100-layered MLP more efficiently than does batch normalization.'" + ] + }, + "execution_count": 317, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1951] " + ] + }, + { + "cell_type": "code", + "execution_count": 173, + "id": "eeb83bf4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['One of the challenges in training generative models such as the variational auto encoder (VAE) is avoiding posterior collapse.', 'When the generator has too much capacity, it is prone to ignoring latent code.', 'This problem is exacerbated when the dataset is small, and the latent dimension is high.', 'The root of the problem is the ELBO objective, specifically the Kullback–Leibler (KL) divergence term in objective function.', 'This paper proposes a new objective function to replace the KL term with one that emulates the maximum mean discrepancy (MMD) objective.', 'It also introduces a new technique, named latent clipping, that is used to control distance between samples in latent space.', 'A probabilistic autoencoder model, named $\\\\\\\\mu$-VAE, is designed and trained on MNIST and MNIST Fashion datasets, using the new objective function and 
is shown to outperform models trained with ELBO and $\\\\\\\\beta$-VAE objective.', 'The $\\\\\\\\mu$-VAE is less prone to posterior collapse, and can generate reconstructions and new samples in good quality.', 'Latent representations learned by $\\\\\\\\mu$-VAE are shown to be good and can be used for downstream tasks such as classification. ']\"" + ] + }, + "execution_count": 173, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1969]" + ] + }, + { + "cell_type": "code", + "execution_count": 311, + "id": "06702eca", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'One of the challenges in training generative models such as the variational auto encoder is avoiding posterior collapse.When the generator has too much capacity, it is prone to ignoring latent code.This problem is exacerbated when the dataset is small, and the latent dimension is high.The root of the problem is the ELBO objective, specifically the Kullback–Leibler divergence term in objective function.This paper proposes a new objective function to replace the KL term with one that emulates the maximum mean discrepancy objective.It also introduces a new technique, named latent clipping, that is used to control distance between samples in latent space.A probabilistic autoencoder model, named-VAE, is designed and trained on MNIST and MNIST Fashion datasets, using the new objective function and is shown to outperform models trained with ELBO and-VAE objective.The-VAE is less prone to posterior collapse, and can generate reconstructions and new samples in good quality.Latent representations learned by-VAE are shown to be good and can be used for downstream tasks such as classification. '" + ] + }, + "execution_count": 311, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.Clean_Text[1969]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "111195c4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "9f5bcd56", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "835e7ecb", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "881d9bed", + "metadata": {}, + "source": [ + "### Remove TexT -- > TraiN" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f191beb8", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 227, + "id": "3881884b", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\([^)]*\\)')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\x00',''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 228, + "id": "ed322c36", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\{[^}]*\\}')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = 
NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 229, + "id": "803b7172", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\$[^$]*\\$')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 230, + "id": "bc3edc29", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\', \\'')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 231, + "id": "dcefbfbf", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\\\n')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 232, + "id": "0e499bbb", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\']','@]'))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('[\\'','@['))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\"]','@]'))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('[\"','@['))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 233, + "id": "ebdea0b5", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('@[',''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 234, + "id": "d48cd6d8", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('.\\', \\'','.'))" + ] + }, + { + "cell_type": "code", + "execution_count": 302, + "id": "3ba82b0d", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('``',''))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f2c10bb0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "2e2b4595", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 290, + "id": "f0627ed6", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\cite\",''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\emph\",''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\t\",''))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\textit\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 293, + "id": "24e3dc83", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "edeb22ad", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 303, + "id": "c244ca94", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('.\\', ','.'))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('. \", \\'','.'))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\'\\'',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 312, + "id": "29ba7404", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 313, + "id": "9bceaa5a", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('\\\\\\\\%','%'))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace('extit','@'))\n", + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\@\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 316, + "id": "17e33fe2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 318, + "id": "a3a43873", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Text\"] = NN_SciTLDR_train[\"Clean_Text\"].apply(lambda x: x.replace(' ',' '))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 138, + "id": "5d511919", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "08979850", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "05d62fd0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a155aef1", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "352d93ab", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ddbc57ff", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "862c357b", + "metadata": {}, + "source": [ + "### NN_SciTLDR_test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "40918a1a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 148, + "id": "f8c26cb7", + "metadata": {}, + "outputs": [], + "source": [ + "# 
pattern = re.compile(r'\\\\\\\\')\n", + "\n", + "# df0 = NN_SciTLDR_test[NN_SciTLDR_test['Clean_Text'].str.contains(pattern)]\n", + "# df0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "337cd69f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "2300f781", + "metadata": {}, + "source": [ + "r'\\([^)]*\\)'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "567090e9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 91, + "id": "98b15ea1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We present a Neural Program Search, an algorithm to generate programs from natural language description and a small number of input / output examples.', 'The algorithm combines methods from Deep Learning and Program Synthesis fields by designing rich domain-specific language (DSL) and defining efficient search algorithm guided by a Seq2Tree model on it.', 'To evaluate the quality of the approach we also present a semi-synthetic dataset of descriptions with test examples and corresponding programs.', 'We show that our algorithm significantly outperforms sequence-to-sequence model with attention baseline.']\"" + ] + }, + "execution_count": 91, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[612]" + ] + }, + { + "cell_type": "code", + "execution_count": 336, + "id": "f541271c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We present a Neural Program Search, an algorithm to generate programs from natural language description and a small number of input output examples.The algorithm combines methods from Deep Learning and Program Synthesis fields by designing rich domain-specific language and defining efficient search algorithm guided by a Seq2Tree model on it.To evaluate the quality of the approach we also present a semi-synthetic dataset of descriptions with test examples and corresponding programs.We show that our algorithm significantly outperforms sequence-to-sequence model with attention baseline.'" + ] + }, + "execution_count": 336, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[612]" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "id": "39879ba0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'Designing a metric manually for unsupervised sequence generation tasks, such as text generation, is essentially difficult.\\', \\'In a such situation, learning a metric of a sequence from data is one possible solution.\\', \\'The previous study, SeqGAN, proposed the framework for unsupervised sequence generation, in which a metric is learned from data, and a generator is optimized with regard to the learned metric with policy gradient, inspired by generative adversarial nets (GANs) and reinforcement learning.\\', \"In this paper, we make two proposals to learn better metric than SeqGAN\\'s: partial reward function and expert-based reward function training.\", \\'The partial reward function is a reward function for a partial sequence of a certain length.\\', \\'SeqGAN employs a reward function for completed sequence only.\\', \\'By combining long-scale and short-scale partial reward functions, we expect a learned metric to be able to evaluate a partial correctness as well as a coherence of a sequence, as a whole.\\', \\'In expert-based reward function training, a reward 
function is trained to discriminate between an expert (or true) sequence and a fake sequence that is produced by editing an expert sequence.\\', \\'Expert-based reward function training is not a kind of GAN frameworks.\\', \\'This makes the optimization of the generator easier.\\', \\'We examine the effect of the partial reward function and expert-based reward function training on synthetic data and real text data, and show improvements over SeqGAN and the model trained with MLE.\\', \\'Specifically, whereas SeqGAN gains 0.42 improvement of NLL over MLE on synthetic data, our best model gains 3.02 improvement, and whereas SeqGAN gains 0.029 improvement of BLEU over MLE, our best model gains 0.250 improvement.\\']'" + ] + }, + "execution_count": 93, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[615]" + ] + }, + { + "cell_type": "code", + "execution_count": 337, + "id": "a79b5336", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"Designing a metric manually for unsupervised sequence generation tasks, such as text generation, is essentially difficult.In a such situation, learning a metric of a sequence from data is one possible solution.The previous study, SeqGAN, proposed the framework for unsupervised sequence generation, in which a metric is learned from data, and a generator is optimized with regard to the learned metric with policy gradient, inspired by generative adversarial nets and reinforcement learning.In this paper, we make two proposals to learn better metric than SeqGAN's: partial reward function and expert-based reward function training.The partial reward function is a reward function for a partial sequence of a certain length.SeqGAN employs a reward function for completed sequence only.By combining long-scale and short-scale partial reward functions, we expect a learned metric to be able to evaluate a partial correctness as well as a coherence of a sequence, as a whole.In expert-based reward function training, a reward function is trained to discriminate between an expert sequence and a fake sequence that is produced by editing an expert sequence.Expert-based reward function training is not a kind of GAN frameworks.This makes the optimization of the generator easier.We examine the effect of the partial reward function and expert-based reward function training on synthetic data and real text data, and show improvements over SeqGAN and the model trained with MLE.Specifically, whereas SeqGAN gains 0.42 improvement of NLL over MLE on synthetic data, our best model gains 3.02 improvement, and whereas SeqGAN gains 0.029 improvement of BLEU over MLE, our best model gains 0.250 improvement.\"" + ] + }, + "execution_count": 337, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[615] " + ] + }, + { + "cell_type": "code", + "execution_count": 95, + "id": "dd401c6e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Recently several different deep learning architectures have been proposed that take a string of characters as the raw input signal and automatically derive features for text classification.', 'Little studies are available that compare the effectiveness of these approaches for character based text classification with each other.', 'In this paper we perform such an empirical comparison for the important cybersecurity problem of DGA detection: classifying domain names as either benign vs. 
produced by malware (i.e., by a Domain Generation Algorithm).', 'Training and evaluating on a dataset with 2M domain names shows that there is surprisingly little difference between various convolutional neural network (CNN) and recurrent neural network (RNN) based architectures in terms of accuracy, prompting a preference for the simpler architectures, since they are faster to train and less prone to overfitting.']\"" + ] + }, + "execution_count": 95, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[5]" + ] + }, + { + "cell_type": "code", + "execution_count": 338, + "id": "7d41dad3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Recently several different deep learning architectures have been proposed that take a string of characters as the raw input signal and automatically derive features for text classification.Little studies are available that compare the effectiveness of these approaches for character based text classification with each other.In this paper we perform such an empirical comparison for the important cybersecurity problem of DGA detection: classifying domain names as either benign vs. produced by malware.Training and evaluating on a dataset with 2M domain names shows that there is surprisingly little difference between various convolutional neural network and recurrent neural network based architectures in terms of accuracy, prompting a preference for the simpler architectures, since they are faster to train and less prone to overfitting.'" + ] + }, + "execution_count": 338, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[5]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ef09749", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "245ae84f", + "metadata": {}, + "source": [ + "r'\\{[^}]*\\}'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aa68ec6f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 98, + "id": "ac749788", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Although variational autoencoders (VAEs) represent a widely influential deep generative model, many aspects of the underlying energy function remain poorly understood.', ' In particular, it is commonly believed that Gaussian encoder/decoder assumptions reduce the effectiveness of VAEs in generating realistic samples', '. In this regard, we rigorously analyze the VAE objective, differentiating situations where this belief is and is not actually true', '. We then leverage the corresponding insights to develop a simple VAE enhancement that requires no additional hyperparameters or sensitive tuning', '. Quantitatively, this proposal produces crisp samples and stable FID scores that are actually competitive with a variety of GAN models, all while retaining desirable attributes of the original VAE architecture', '. 
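(Editor's note) A minimal, self-contained sketch of the parenthesis-stripping step noted in the raw cell above (`r'\([^)]*\)'`). The helper name and the sample sentence are illustrative only, not taken from the notebook; the point is simply that substituting this pattern drops parenthesised asides such as "(DSL)" or "(GANs)", in line with the before/after outputs shown for Clean_Text[612] and Clean_Text[5].

```python
import re

# Pattern recorded in the raw cell above: removes "(...)" asides such as "(DSL)".
PAREN = re.compile(r'\([^)]*\)')

def drop_parens(text: str) -> str:
    # Remove the parenthesised span, then tidy the doubled space it leaves behind.
    return re.sub(r'\s{2,}', ' ', PAREN.sub('', text)).strip()

# Hypothetical example (not a dataset row):
print(drop_parens("We design a rich domain-specific language (DSL) and a search algorithm."))
# -> "We design a rich domain-specific language and a search algorithm."
```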
The code for our model is available at \\\\\\\\url{https://github.com/daib13/TwoStageVAE}.']\"" + ] + }, + "execution_count": 98, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[19]" + ] + }, + { + "cell_type": "code", + "execution_count": 339, + "id": "32aa4ea2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Although variational autoencoders represent a widely influential deep generative model, many aspects of the underlying energy function remain poorly understood. In particular, it is commonly believed that Gaussian encoder/decoder assumptions reduce the effectiveness of VAEs in generating realistic samples. In this regard, we rigorously analyze the VAE objective, differentiating situations where this belief is and is not actually true. We then leverage the corresponding insights to develop a simple VAE enhancement that requires no additional hyperparameters or sensitive tuning. Quantitatively, this proposal produces crisp samples and stable FID scores that are actually competitive with a variety of GAN models, all while retaining desirable attributes of the original VAE architecture. The code for our model is available at .'" + ] + }, + "execution_count": 339, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[19] " + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "id": "d7050812", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Mini-batch stochastic gradient descent (SGD) is state of the art in large scale distributed training.', 'The scheme can reach a linear speed-up with respect to the number of workers, but this is rarely seen in practice as the scheme often suffers from large network delays and bandwidth limits.', 'To overcome this communication bottleneck recent works propose to reduce the communication frequency.', 'An algorithm of this type is local SGD that runs SGD independently in parallel on different workers and averages the sequences only once in a while.', 'This scheme shows promising results in practice, but eluded thorough theoretical analysis.\\\\n \\\\n', 'We prove concise convergence rates for local SGD on convex problems and show that it converges at the same rate as mini-batch SGD in terms of number of evaluated gradients, that is, the scheme achieves linear speed-up in the number of workers and mini-batch size.', 'The number of communication rounds can be reduced up to a factor of T^{1/2}---where T denotes the number of total steps---compared to mini-batch SGD.', 'This also holds for asynchronous implementations.\\\\n\\\\n', 'Local SGD can also be used for large scale training of deep learning models.', 'The results shown here aim serving as a guideline to further explore the theoretical and practical aspects of local SGD in these applications.']\"" + ] + }, + "execution_count": 100, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[117]" + ] + }, + { + "cell_type": "code", + "execution_count": 340, + "id": "625e4fd5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Mini-batch stochastic gradient descent is state of the art in large scale distributed training.The scheme can reach a linear speed-up with respect to the number of workers, but this is rarely seen in practice as the scheme often suffers from large network delays and bandwidth limits.To overcome this communication bottleneck recent works propose to 
reduce the communication frequency.An algorithm of this type is local SGD that runs SGD independently in parallel on different workers and averages the sequences only once in a while.This scheme shows promising results in practice, but eluded thorough theoretical analysis. We prove concise convergence rates for local SGD on convex problems and show that it converges at the same rate as mini-batch SGD in terms of number of evaluated gradients, that is, the scheme achieves linear speed-up in the number of workers and mini-batch size.The number of communication rounds can be reduced up to a factor of T^ where T denotes the number of total steps compared to mini-batch SGD.This also holds for asynchronous implementations.Local SGD can also be used for large scale training of deep learning models.The results shown here aim serving as a guideline to further explore the theoretical and practical aspects of local SGD in these applications.'" + ] + }, + "execution_count": 340, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[117]" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "id": "21090843", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Deep learning is becoming more widespread in its application due to its power in solving complex classification problems.', 'However, deep learning models often require large memory and energy consumption, which may prevent them from being deployed effectively on embedded platforms, limiting their applications.', 'This work addresses the problem by proposing methods {\\\\\\\\em Weight Reduction Quantisation} for compressing the memory footprint of the models, including reducing the number of weights and the number of bits to store each weight.', 'Beside, applying with sparsity-inducing regularization, our work focuses on speeding up stochastic variance reduced gradients (SVRG) optimization on non-convex problem.', 'Our method that mini-batch SVRG with $\\\\\\\\ell$1 regularization on non-convex problem has faster and smoother convergence rates than SGD by using adaptive learning rates.', 'Experimental evaluation of our approach uses MNIST and CIFAR-10 datasets on LeNet-300-100 and LeNet-5 models, showing our approach can reduce the memory requirements both in the convolutional and fully connected layers by up to 60$\\\\\\\\times$ without affecting their test accuracy.']\"" + ] + }, + "execution_count": 102, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[407]" + ] + }, + { + "cell_type": "code", + "execution_count": 341, + "id": "4c308625", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Deep learning is becoming more widespread in its application due to its power in solving complex classification problems.However, deep learning models often require large memory and energy consumption, which may prevent them from being deployed effectively on embedded platforms, limiting their applications.This work addresses the problem by proposing methods for compressing the memory footprint of the models, including reducing the number of weights and the number of bits to store each weight.Beside, applying with sparsity-inducing regularization, our work focuses on speeding up stochastic variance reduced gradients optimization on non-convex problem.Our method that mini-batch SVRG with1 regularization on non-convex problem has faster and smoother convergence rates than SGD by using adaptive learning 
rates.Experimental evaluation of our approach uses MNIST and CIFAR-10 datasets on LeNet-300-100 and LeNet-5 models, showing our approach can reduce the memory requirements both in the convolutional and fully connected layers by up to 60 without affecting their test accuracy.'" + ] + }, + "execution_count": 341, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[407]" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "id": "22639e4b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Generative models are important tools to capture and investigate the properties of complex empirical data.', 'Recent developments such as Generative Adversarial Networks (GANs) and Variational Auto-Encoders (VAEs) use two very similar, but \\\\\\\\textit{reverse}, deep convolutional architectures, one to generate and one to extract information from data.', 'Does learning the parameters of both architectures obey the same rules?', 'We exploit the causality principle of independence of mechanisms to quantify how the weights of successive layers adapt to each other.', 'Using the recently introduced Spectral Independence Criterion, we quantify the dependencies between the kernels of successive convolutional layers and show that those are more independent for the generative process than for information extraction, in line with results from the field of causal inference.', 'In addition, our experiments on generation of human faces suggest that more independence between successive layers of generators results in improved performance of these architectures.\\\\n']\"" + ] + }, + "execution_count": 104, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[580]" + ] + }, + { + "cell_type": "code", + "execution_count": 342, + "id": "60d7ef6f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Generative models are important tools to capture and investigate the properties of complex empirical data.Recent developments such as Generative Adversarial Networks and Variational Auto-Encoders use two very similar, but , deep convolutional architectures, one to generate and one to extract information from data.Does learning the parameters of both architectures obey the same rules?We exploit the causality principle of independence of mechanisms to quantify how the weights of successive layers adapt to each other.Using the recently introduced Spectral Independence Criterion, we quantify the dependencies between the kernels of successive convolutional layers and show that those are more independent for the generative process than for information extraction, in line with results from the field of causal inference.In addition, our experiments on generation of human faces suggest that more independence between successive layers of generators results in improved performance of these architectures.'" + ] + }, + "execution_count": 342, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[580] " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bb24cce3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "ef597d46", + "metadata": {}, + "source": [ + "r'\\$[^$]*\\$'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "33491dd4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 106, + "id": 
"87850757", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'The top-$k$ error is a common measure of performance in machine learning and computer vision.\\', \\'In practice, top-$k$ classification is typically performed with deep neural networks trained with the cross-entropy loss.\\', \\'Theoretical results indeed suggest that cross-entropy is an optimal learning objective for such a task in the limit of infinite data.\\', \\'In the context of limited and noisy data however, the use of a loss function that is specifically designed for top-$k$ classification can bring significant improvements.\\\\n\\', \\'Our empirical evidence suggests that the loss function must be smooth and have non-sparse gradients in order to work well with deep neural networks.\\', \\'Consequently, we introduce a family of smoothed loss functions that are suited to top-$k$ optimization via deep learning.\\', \\'The widely used cross-entropy is a special case of our family.\\', \\'Evaluating our smooth loss functions is computationally challenging: a na{\\\\\\\\\"i}ve algorithm would require $\\\\\\\\mathcal{O}(\\\\\\\\binom{n}{k})$ operations, where $n$ is the number of classes.\\', \\'Thanks to a connection to polynomial algebra and a divide-and-conquer approach, we provide an algorithm with a time complexity of $\\\\\\\\mathcal{O}(k n)$.\\', \\'Furthermore, we present a novel approximation to obtain fast and stable algorithms on GPUs with single floating point precision.\\', \\'We compare the performance of the cross-entropy loss and our margin-based losses in various regimes of noise and data size, for the predominant use case of $k=5$.\\', \\'Our investigation reveals that our loss is more robust to noise and overfitting than cross-entropy.\\']'" + ] + }, + "execution_count": 106, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[23]" + ] + }, + { + "cell_type": "code", + "execution_count": 343, + "id": "962284e7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'The top- error is a common measure of performance in machine learning and computer vision.In practice, top- classification is typically performed with deep neural networks trained with the cross-entropy loss.Theoretical results indeed suggest that cross-entropy is an optimal learning objective for such a task in the limit of infinite data.In the context of limited and noisy data however, the use of a loss function that is specifically designed for top- classification can bring significant improvements.Our empirical evidence suggests that the loss function must be smooth and have non-sparse gradients in order to work well with deep neural networks.Consequently, we introduce a family of smoothed loss functions that are suited to top- optimization via deep learning.The widely used cross-entropy is a special case of our family.Evaluating our smooth loss functions is computationally challenging: a nave algorithm would require operations, where is the number of classes.Thanks to a connection to polynomial algebra and a divide-and-conquer approach, we provide an algorithm with a time complexity of.Furthermore, we present a novel approximation to obtain fast and stable algorithms on GPUs with single floating point precision.We compare the performance of the cross-entropy loss and our margin-based losses in various regimes of noise and data size, for the predominant use case of.Our investigation reveals that our loss is more robust to noise and overfitting 
than cross-entropy.'" + ] + }, + "execution_count": 343, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[23]" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "id": "8c743db2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Techniques such as ensembling and distillation promise model quality improvements when paired with almost any base model.', 'However, due to increased test-time cost (for ensembles) and increased complexity of the training pipeline (for distillation), these techniques are challenging to use in industrial settings.', 'In this paper we explore a variant of distillation which is relatively straightforward to use as it does not require a complicated multi-stage setup or many new hyperparameters.', 'Our first claim is that online distillation enables us to use extra parallelism to fit very large datasets about twice as fast.', 'Crucially, we can still speed up training even after we have already reached the point at which additional parallelism provides no benefit for synchronous or asynchronous stochastic gradient descent.', 'Two neural networks trained on disjoint subsets of the data can share knowledge by encouraging each model to agree with the predictions the other model would have made.', 'These predictions can come from a stale version of the other model so they can be safely computed using weights that only rarely get transmitted.', 'Our second claim is that online distillation is a cost-effective way to make the exact predictions of a model dramatically more reproducible.', 'We support our claims using experiments on the Criteo Display Ad Challenge dataset, ImageNet, and the largest to-date dataset used for neural language modeling, containing $6\\\\\\\\times 10^{11}$ tokens and based on the Common Crawl repository of web data.']\"" + ] + }, + "execution_count": 108, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[73]" + ] + }, + { + "cell_type": "code", + "execution_count": 344, + "id": "f9b25d12", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Techniques such as ensembling and distillation promise model quality improvements when paired with almost any base model.However, due to increased test-time cost and increased complexity of the training pipeline, these techniques are challenging to use in industrial settings.In this paper we explore a variant of distillation which is relatively straightforward to use as it does not require a complicated multi-stage setup or many new hyperparameters.Our first claim is that online distillation enables us to use extra parallelism to fit very large datasets about twice as fast.Crucially, we can still speed up training even after we have already reached the point at which additional parallelism provides no benefit for synchronous or asynchronous stochastic gradient descent.Two neural networks trained on disjoint subsets of the data can share knowledge by encouraging each model to agree with the predictions the other model would have made.These predictions can come from a stale version of the other model so they can be safely computed using weights that only rarely get transmitted.Our second claim is that online distillation is a cost-effective way to make the exact predictions of a model dramatically more reproducible.We support our claims using experiments on the Criteo Display Ad Challenge dataset, ImageNet, and the largest to-date dataset used for 
neural language modeling, containing tokens and based on the Common Crawl repository of web data.'" + ] + }, + "execution_count": 344, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[73]" + ] + }, + { + "cell_type": "code", + "execution_count": 110, + "id": "83b2fcbf", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['With the recently rapid development in deep learning, deep neural networks have been widely adopted in many real-life applications.', 'However, deep neural networks are also known to have very little control over its uncertainty for test examples, which potentially causes very harmful and annoying consequences in practical scenarios.', 'In this paper, we are particularly interested in designing a higher-order uncertainty metric for deep neural networks and investigate its performance on the out-of-distribution detection task proposed by~\\\\\\\\cite{hendrycks2016baseline}.', 'Our method first assumes there exists a underlying higher-order distribution $\\\\\\\\mathcal{P}(z)$', ', which generated label-wise distribution $\\\\\\\\mathcal{P}(y)$', 'over classes on the K-dimension simplex, and then approximate such higher-order distribution via parameterized posterior function $p_{\\\\\\\\theta}(z|x)$ under variational inference framework, finally we use the entropy of learned posterior distribution $p_{\\\\\\\\theta}(z|x)$ as uncertainty measure to detect out-of-distribution examples. However', ', we identify the overwhelming over-concentration issue in such a framework, which greatly hinders the detection performance. Therefore', ', we further design a log-smoothing function to alleviate such issue to greatly increase the robustness of the proposed entropy-based uncertainty measure. Through', 'comprehensive experiments on various datasets and architectures, our proposed variational Dirichlet framework with entropy-based uncertainty measure is consistently observed to yield significant improvements over many baseline systems.']\"" + ] + }, + "execution_count": 110, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[194]" + ] + }, + { + "cell_type": "code", + "execution_count": 345, + "id": "04229794", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'With the recently rapid development in deep learning, deep neural networks have been widely adopted in many real-life applications.However, deep neural networks are also known to have very little control over its uncertainty for test examples, which potentially causes very harmful and annoying consequences in practical scenarios.In this paper, we are particularly interested in designing a higher-order uncertainty metric for deep neural networks and investigate its performance on the out-of-distribution detection task proposed by~.Our method first assumes there exists a underlying higher-order distribution, which generated label-wise distributionover classes on the K-dimension simplex, and then approximate such higher-order distribution via parameterized posterior function under variational inference framework, finally we use the entropy of learned posterior distribution as uncertainty measure to detect out-of-distribution examples. However, we identify the overwhelming over-concentration issue in such a framework, which greatly hinders the detection performance. 
Therefore, we further design a log-smoothing function to alleviate such issue to greatly increase the robustness of the proposed entropy-based uncertainty measure. Throughcomprehensive experiments on various datasets and architectures, our proposed variational Dirichlet framework with entropy-based uncertainty measure is consistently observed to yield significant improvements over many baseline systems.'" + ] + }, + "execution_count": 345, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[194] " + ] + }, + { + "cell_type": "code", + "execution_count": 112, + "id": "3c3c158e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'We propose a practical method for $L_0$ norm regularization for neural networks: pruning the network during training by encouraging weights to become exactly zero.\\', \\'Such regularization is interesting since (1) it can greatly speed up training and inference, and (2) it can improve generalization.\\', \\'AIC and BIC, well-known model selection criteria, are special cases of $L_0$ regularization.\\', \\'However, since the $L_0$ norm of weights is non-differentiable, we cannot incorporate it directly as a regularization term in the objective function.\\', \\'We propose a solution through the inclusion of a collection of non-negative stochastic gates, which collectively determine which weights to set to zero.\\', \\'We show that, somewhat surprisingly, for certain distributions over the gates, the expected $L_0$ regularized objective is differentiable with respect to the distribution parameters.\\', \"We further propose the \\\\\\\\emph{hard concrete} distribution for the gates, which is obtained by ``stretching\\'\\' a binary concrete distribution and then transforming its samples with a hard-sigmoid.\", \\'The parameters of the distribution over the gates can then be jointly optimized with the original network parameters.\\', \\'As a result our method allows for straightforward and efficient learning of model structures with stochastic gradient descent and allows for conditional computation in a principled way.\\', \\'We perform various experiments to demonstrate the effectiveness of the resulting approach and regularizer.\\']'" + ] + }, + "execution_count": 112, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[400]" + ] + }, + { + "cell_type": "code", + "execution_count": 346, + "id": "67f0c3a5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"We propose a practical method for norm regularization for neural networks: pruning the network during training by encouraging weights to become exactly zero.Such regularization is interesting since it can greatly speed up training and inference, and it can improve generalization.AIC and BIC, well-known model selection criteria, are special cases of regularization.However, since the norm of weights is non-differentiable, we cannot incorporate it directly as a regularization term in the objective function.We propose a solution through the inclusion of a collection of non-negative stochastic gates, which collectively determine which weights to set to zero.We show that, somewhat surprisingly, for certain distributions over the gates, the expected regularized objective is differentiable with respect to the distribution parameters.We further propose the distribution for the gates, which is obtained by stretching'' a binary concrete distribution and then transforming its 
samples with a hard-sigmoid.The parameters of the distribution over the gates can then be jointly optimized with the original network parameters.As a result our method allows for straightforward and efficient learning of model structures with stochastic gradient descent and allows for conditional computation in a principled way.We perform various experiments to demonstrate the effectiveness of the resulting approach and regularizer.\"" + ] + }, + "execution_count": 346, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[400] " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "972251ab", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "2f22ec5a", + "metadata": {}, + "source": [ + "\\\\\\\\" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e813fb08", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 114, + "id": "301c6598", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['The task of Reading Comprehension with Multiple Choice Questions, requires a human (or machine) to read a given \\\\\\\\{\\\\\\\\textit{passage, question}\\\\\\\\} pair and select one of the $n$ given options.', 'The current state of the art model for this task first computes a query-aware representation for the passage and then \\\\\\\\textit{selects} the option which has the maximum similarity with this representation.', 'However, when humans perform this task they do not just focus on option selection but use a combination of \\\\\\\\textit{elimination} and \\\\\\\\textit{selection}. Specifically, a human would first try to eliminate the most irrelevant option and then read the document again in the light of this new information (and perhaps ignore portions corresponding to the eliminated option).', 'This process could be repeated multiple times till the reader is finally ready to select the correct option.', 'We propose \\\\\\\\textit{ElimiNet}, a neural network based model which tries to mimic this process.', 'Specifically, it has gates which decide whether an option can be eliminated given the \\\\\\\\{\\\\\\\\textit{document, question}\\\\\\\\} pair and if so it tries to make the document representation orthogonal to this eliminatedd option (akin to ignoring portions of the document corresponding to the eliminated option).', 'The model makes multiple rounds of partial elimination to refine the document representation and finally uses a selection module to pick the best option.', 'We evaluate our model on the recently released large scale RACE dataset and show that it outperforms the current state of the art model on 7 out of the 13 question types in this dataset.', 'Further we show that taking an ensemble of our \\\\\\\\textit{elimination-selection} based method with a \\\\\\\\textit{selection} based method gives us an improvement of 7\\\\\\\\% (relative) over the best reported performance on this dataset. 
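(Editor's note) The raw cells in this part record two further inline patterns (`{...}` groups and `$...$` math) plus bare backslash escapes such as `\\textit` and `\\cite`. A hedged sketch, assuming plain Python strings, of how these could be stripped in one pass; the function name, the combined LaTeX-token regex, and the example sentence are illustrative, not the notebook's exact cells.

```python
import re

# Patterns noted in the raw cells: {...} groups, $...$ inline math, and leftover
# LaTeX-style tokens such as \cite, \emph, \textit, \url and stray backslashes.
INLINE = [
    re.compile(r'\{[^}]*\}'),
    re.compile(r'\$[^$]*\$'),
    re.compile(r'\\+(cite|emph|textit|url)\b'),
    re.compile(r'\\+'),
]

def strip_markup(text: str) -> str:
    for pat in INLINE:
        text = pat.sub('', text)
    return re.sub(r'\s{2,}', ' ', text).strip()

# Hypothetical example (not a dataset row):
print(strip_markup(r"A \textit{reverse} architecture with $\beta_1 = 0.99$ momentum."))
# -> "A architecture with momentum."
```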
\\\\n']\"" + ] + }, + "execution_count": 114, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[64]" + ] + }, + { + "cell_type": "code", + "execution_count": 347, + "id": "a2f31e2d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'The task of Reading Comprehension with Multiple Choice Questions, requires a human to read a given } pair and select one of the given options.The current state of the art model for this task first computes a query-aware representation for the passage and then the option which has the maximum similarity with this representation.However, when humans perform this task they do not just focus on option selection but use a combination of and . Specifically, a human would first try to eliminate the most irrelevant option and then read the document again in the light of this new information.This process could be repeated multiple times till the reader is finally ready to select the correct option.We propose , a neural network based model which tries to mimic this process.Specifically, it has gates which decide whether an option can be eliminated given the } pair and if so it tries to make the document representation orthogonal to this eliminatedd option.The model makes multiple rounds of partial elimination to refine the document representation and finally uses a selection module to pick the best option.We evaluate our model on the recently released large scale RACE dataset and show that it outperforms the current state of the art model on 7 out of the 13 question types in this dataset.Further we show that taking an ensemble of our based method with a based method gives us an improvement of 7% over the best reported performance on this dataset. '" + ] + }, + "execution_count": 347, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[64] " + ] + }, + { + "cell_type": "code", + "execution_count": 116, + "id": "3b506a3a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'RMSProp and ADAM continue to be extremely popular algorithms for training neural nets but their theoretical convergence properties have remained unclear.\\', \\'Further, recent work has seemed to suggest that these algorithms have worse generalization properties when compared to carefully tuned stochastic gradient descent or its momentum variants.\\', \\'In this work, we make progress towards a deeper understanding of ADAM and RMSProp in two ways.\\', \\'First, we provide proofs that these adaptive gradient algorithms are guaranteed to reach criticality for smooth non-convex objectives, and we give bounds on the running time.\\\\n\\\\n\\', \"Next we design experiments to empirically study the convergence and generalization properties of RMSProp and ADAM against Nesterov\\'s Accelerated Gradient method on a variety of common autoencoder setups and on VGG-9 with CIFAR-10.\", \\'Through these experiments we demonstrate the interesting sensitivity that ADAM has to its momentum parameter \\\\\\\\beta_1.\\', \\'We show that at very high values of the momentum parameter (\\\\\\\\beta_1 = 0.99) ADAM outperforms a carefully tuned NAG on most of our experiments, in terms of getting lower training and test losses.\\', \"On the other hand, NAG can sometimes do better when ADAM\\'s \\\\\\\\beta_1 is set to the most commonly used value: \\\\\\\\beta_1 = 0.9, indicating the importance of tuning the hyperparameters of ADAM to get better generalization 
performance.\\\\n\\\\n\", \\'We also report experiments on different autoencoders to demonstrate that NAG has better abilities in terms of reducing the gradient norms, and it also produces iterates which exhibit an increasing trend for the minimum eigenvalue of the Hessian of the loss function at the iterates.\\']'" + ] + }, + "execution_count": 116, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[191]" + ] + }, + { + "cell_type": "code", + "execution_count": 348, + "id": "f9fa13ba", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"RMSProp and ADAM continue to be extremely popular algorithms for training neural nets but their theoretical convergence properties have remained unclear.Further, recent work has seemed to suggest that these algorithms have worse generalization properties when compared to carefully tuned stochastic gradient descent or its momentum variants.In this work, we make progress towards a deeper understanding of ADAM and RMSProp in two ways.First, we provide proofs that these adaptive gradient algorithms are guaranteed to reach criticality for smooth non-convex objectives, and we give bounds on the running time.Next we design experiments to empirically study the convergence and generalization properties of RMSProp and ADAM against Nesterov's Accelerated Gradient method on a variety of common autoencoder setups and on VGG-9 with CIFAR-10.Through these experiments we demonstrate the interesting sensitivity that ADAM has to its momentum parameter beta_1.We show that at very high values of the momentum parameter ADAM outperforms a carefully tuned NAG on most of our experiments, in terms of getting lower training and test losses.On the other hand, NAG can sometimes do better when ADAM's beta_1 is set to the most commonly used value: beta_1 = 0.9, indicating the importance of tuning the hyperparameters of ADAM to get better generalization performance.We also report experiments on different autoencoders to demonstrate that NAG has better abilities in terms of reducing the gradient norms, and it also produces iterates which exhibit an increasing trend for the minimum eigenvalue of the Hessian of the loss function at the iterates.\"" + ] + }, + "execution_count": 348, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[191]" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "id": "d297283e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Generative models are important tools to capture and investigate the properties of complex empirical data.', 'Recent developments such as Generative Adversarial Networks (GANs) and Variational Auto-Encoders (VAEs) use two very similar, but \\\\\\\\textit{reverse}, deep convolutional architectures, one to generate and one to extract information from data.', 'Does learning the parameters of both architectures obey the same rules?', 'We exploit the causality principle of independence of mechanisms to quantify how the weights of successive layers adapt to each other.', 'Using the recently introduced Spectral Independence Criterion, we quantify the dependencies between the kernels of successive convolutional layers and show that those are more independent for the generative process than for information extraction, in line with results from the field of causal inference.', 'In addition, our experiments on generation of human faces suggest that more independence between successive 
layers of generators results in improved performance of these architectures.\\\\n']\"" + ] + }, + "execution_count": 118, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[580]" + ] + }, + { + "cell_type": "code", + "execution_count": 349, + "id": "66a8443d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Generative models are important tools to capture and investigate the properties of complex empirical data.Recent developments such as Generative Adversarial Networks and Variational Auto-Encoders use two very similar, but , deep convolutional architectures, one to generate and one to extract information from data.Does learning the parameters of both architectures obey the same rules?We exploit the causality principle of independence of mechanisms to quantify how the weights of successive layers adapt to each other.Using the recently introduced Spectral Independence Criterion, we quantify the dependencies between the kernels of successive convolutional layers and show that those are more independent for the generative process than for information extraction, in line with results from the field of causal inference.In addition, our experiments on generation of human faces suggest that more independence between successive layers of generators results in improved performance of these architectures.'" + ] + }, + "execution_count": 349, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.Clean_Text[580]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0467fd2c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06bc0e0a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "a8e750de", + "metadata": {}, + "source": [ + "### Remove Text -- > Test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c2c28ab4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 235, + "id": "40786d55", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\([^)]*\\)')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\x00',''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 236, + "id": "b5110e64", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\{[^}]*\\}')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 237, + "id": "359edb8e", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\$[^$]*\\$')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(' 
\\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 238, + "id": "d4e9390e", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\', \\'')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 239, + "id": "63dda1c7", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\\\n')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 240, + "id": "719804e6", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\']','@]'))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('[\\'','@['))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\"]','@]'))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('[\"','@['))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 241, + "id": "c91ff000", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('@[',''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 242, + "id": "f6a19a71", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('.\\', \\'','.'))" + ] + }, + { + "cell_type": "code", + "execution_count": 327, + "id": "19596ae1", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('``',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('---',' '))\n", + "\n", + "\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\\\\\\\url',''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(' /',''))\n", + "\n", + "\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('.\", \\'','.'))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('.\\', \"','.'))" + ] + }, + { + "cell_type": "code", + "execution_count": 557, + "id": "912fb841", + "metadata": {}, + 
"outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 558, + "id": "badc50f2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 335, + "id": "77a8a0be", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\cite\",''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\emph\",''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\t\",''))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\textit\",''))\n", + "\n", + "\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('\\\\\\\\%','%'))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace('extit','@'))\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\@\",''))\n", + "\n", + "\n", + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 560, + "id": "7d69a531", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 351, + "id": "79260543", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Text\"] = NN_SciTLDR_test[\"Clean_Text\"].apply(lambda x: x.replace(' ',' '))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 566, + "id": "804a6a67", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "70378cd5", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39b99ea0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "8f0af0f3", + "metadata": {}, + "source": [ + "### N_SciTLDR_dev" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f807d4c6", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 147, + "id": "4785ef41", + "metadata": {}, + "outputs": [], + "source": [ + "# pattern = re.compile(r'\\\\\\\\')\n", + "\n", + "# df0 = NN_SciTLDR_dev[NN_SciTLDR_dev['Clean_Text'].str.contains(pattern)]\n", + "# df0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "834b5627", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "ee87d00b", + "metadata": {}, + "source": [ + "r'\\([^)]*\\)'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c23f600e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 121, + "id": "c6bd31fd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['It is important to collect credible training samples $(x,y)$ for building data-intensive learning systems (e.g., a deep learning system).', 'In the literature, there is a line of studies on eliciting distributional information from self-interested agents who hold a relevant information. 
', 'Asking people to report complex distribution $p(x)$, though theoretically viable, is challenging in practice.', 'This is primarily due to the heavy cognitive loads required for human agents to reason and report this high dimensional information.', 'Consider the example where we are interested in building an image classifier via first collecting a certain category of high-dimensional image data.', 'While classical elicitation results apply to eliciting a complex and generative (and continuous) distribution $p(x)$ for this image data, we are interested in eliciting samples $x_i \\\\\\\\sim p(x)$ from agents.', 'This paper introduces a deep learning aided method to incentivize credible sample contributions from selfish and rational agents.', 'The challenge to do so is to design an incentive-compatible score function to score each reported sample to induce truthful reports, instead of an arbitrary or even adversarial one.', 'We show that with accurate estimation of a certain $f$-divergence function we are able to achieve approximate incentive compatibility in eliciting truthful samples.', 'We then present an efficient estimator with theoretical guarantee via studying the variational forms of $f$-divergence function.', 'Our work complements the literature of information elicitation via introducing the problem of \\\\\\\\emph{sample elicitation}. We also show a connection between this sample elicitation problem and $f$-GAN, and how this connection can help reconstruct an estimator of the distribution based on collected samples.']\"" + ] + }, + "execution_count": 121, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[8]" + ] + }, + { + "cell_type": "code", + "execution_count": 361, + "id": "74466df4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'It is important to collect credible training samples for building data-intensive learning systems.In the literature, there is a line of studies on eliciting distributional information from self-interested agents who hold a relevant information. Asking people to report complex distribution, though theoretically viable, is challenging in practice.This is primarily due to the heavy cognitive loads required for human agents to reason and report this high dimensional information.Consider the example where we are interested in building an image classifier via first collecting a certain category of high-dimensional image data.While classical elicitation results apply to eliciting a complex and generative distribution for this image data, we are interested in eliciting samples from agents.This paper introduces a deep learning aided method to incentivize credible sample contributions from selfish and rational agents.The challenge to do so is to design an incentive-compatible score function to score each reported sample to induce truthful reports, instead of an arbitrary or even adversarial one.We show that with accurate estimation of a certain-divergence function we are able to achieve approximate incentive compatibility in eliciting truthful samples.We then present an efficient estimator with theoretical guarantee via studying the variational forms of-divergence function.Our work complements the literature of information elicitation via introducing the problem of . 
We also show a connection between this sample elicitation problem and-GAN, and how this connection can help reconstruct an estimator of the distribution based on collected samples.'" + ] + }, + "execution_count": 361, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[8]" + ] + }, + { + "cell_type": "code", + "execution_count": 123, + "id": "861e8ec8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'Characterization of the representations learned in intermediate layers of deep networks can provide valuable insight into the nature of a task and can guide the development of well-tailored learning strategies.\\', \\'Here we study convolutional neural network-based acoustic models in the context of automatic speech recognition.\\', \\'Adapting a method proposed by Yosinski et al. [2014], we measure the transferability of each layer between German and English to assess the their language-specifity.\\', \\'We observe three distinct regions of transferability: (1) the first two layers are entirely transferable between languages, (2) layers 2–8 are also highly transferable but we find evidence of some language specificity, (3) the subsequent fully connected layers are more language specific but can be successfully finetuned to the target language.\\', \\'To further probe the effect of weight freezing, we performed follow-up experiments using freeze-training [Raghu et al., 2017].\\', \"Our results are consistent with the observation that CCNs converge \\'bottom up\\' during training and demonstrate the benefit of freeze training, especially for transfer learning.\"]'" + ] + }, + "execution_count": 123, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[613]" + ] + }, + { + "cell_type": "code", + "execution_count": 376, + "id": "c4cb3891", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Characterization of the representations learned in intermediate layers of deep networks can provide valuable insight into the nature of a task and can guide the development of well-tailored learning strategies.Here we study convolutional neural network-based acoustic models in the context of automatic speech recognition.Adapting a method proposed by Yosinski et al. 
[2014], we measure the transferability of each layer between German and English to assess the their language-specifity.We observe three distinct regions of transferability: the first two layers are entirely transferable between languages, layers 2–8 are also highly transferable but we find evidence of some language specificity, the subsequent fully connected layers are more language specific but can be successfully finetuned to the target language.To further probe the effect of weight freezing, we performed follow-up experiments using freeze-training [Raghu et al., 2017].Our results are consistent with the observation that CCNs converge bottom up during training and demonstrate the benefit of freeze training, especially for transfer learning.'" + ] + }, + "execution_count": 376, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[613] " + ] + }, + { + "cell_type": "code", + "execution_count": 125, + "id": "8f17caee", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['This paper proposes and demonstrates a surprising pattern in the training of neural networks: there is a one to one relation between the values of any pair of losses (such as cross entropy, mean squared error, 0/1 error etc.) evaluated for a model arising at (any point of) a training run.', 'This pattern is universal in the sense that this one to one relationship is identical across architectures (such as VGG, Resnet, Densenet etc.), algorithms (SGD and SGD with momentum) and training loss functions (cross entropy and mean squared error).']\"" + ] + }, + "execution_count": 125, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[618]" + ] + }, + { + "cell_type": "code", + "execution_count": 363, + "id": "3d86b79c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'This paper proposes and demonstrates a surprising pattern in the training of neural networks: there is a one to one relation between the values of any pair of losses evaluated for a model arising at a training run.This pattern is universal in the sense that this one to one relationship is identical across architectures, algorithms and training loss functions.'" + ] + }, + "execution_count": 363, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[618]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3effd487", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "17a20637", + "metadata": {}, + "source": [ + "r'\\{[^}]*\\}'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "626d7826", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 128, + "id": "19c54471", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['The universal approximation theorem, in one of its most general versions, says that if we consider only continuous activation functions σ, then a standard feedforward neural network with one hidden layer is able to approximate any continuous multivariate function f to any given approximation threshold ε, if and only if σ is non-polynomial.', 'In this paper, we give a direct algebraic proof of the theorem.', 'Furthermore we shall explicitly quantify the number of hidden units required for approximation.', 'Specifically, if X in R^n is compact, then a neural network with n input units, m output units, and a single 
hidden layer with {n+d choose d} hidden units (independent of m and ε), can uniformly approximate any polynomial function f:X -> R^m whose total degree is at most d for each of its m coordinate functions.', 'In the general case that f is any continuous function, we show there exists some N in O(ε^{-n}) (independent of m), such that N hidden units would suffice to approximate f.', 'We also show that this uniform approximation property (UAP) still holds even under seemingly strong conditions imposed on the weights.', 'We highlight several consequences:', '(i) For any δ > 0, the UAP still holds if we restrict all non-bias weights w in the last layer to satisfy |w| < δ.', '(ii) There exists some λ>0 (depending only on f and σ), such that the UAP still holds if we restrict all non-bias weights w in the first layer to satisfy |w|>λ.', '(iii) If the non-bias weights in the first layer are *fixed* and randomly chosen from a suitable range, then the UAP holds with probability 1.']\"" + ] + }, + "execution_count": 128, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[73]" + ] + }, + { + "cell_type": "code", + "execution_count": 364, + "id": "b7e0ae17", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'The universal approximation theorem, in one of its most general versions, says that if we consider only continuous activation functions σ, then a standard feedforward neural network with one hidden layer is able to approximate any continuous multivariate function f to any given approximation threshold ε, if and only if σ is non-polynomial.In this paper, we give a direct algebraic proof of the theorem.Furthermore we shall explicitly quantify the number of hidden units required for approximation.Specifically, if X in R^n is compact, then a neural network with n input units, m output units, and a single hidden layer with hidden units, can uniformly approximate any polynomial function f:X -> R^m whose total degree is at most d for each of its m coordinate functions.In the general case that f is any continuous function, we show there exists some N in O, such that N hidden units would suffice to approximate f.We also show that this uniform approximation property still holds even under seemingly strong conditions imposed on the weights.We highlight several consequences: For any δ > 0, the UAP still holds if we restrict all non-bias weights w in the last layer to satisfy |w| < δ. There exists some λ>0, such that the UAP still holds if we restrict all non-bias weights w in the first layer to satisfy |w|>λ. 
If the non-bias weights in the first layer are *fixed* and randomly chosen from a suitable range, then the UAP holds with probability 1.'" + ] + }, + "execution_count": 364, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[73]" + ] + }, + { + "cell_type": "code", + "execution_count": 130, + "id": "b3178e0b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Over-parameterization is ubiquitous nowadays in training neural networks to benefit both optimization in seeking global optima and generalization in reducing prediction error.', 'However, compressive networks are desired in many real world applications and direct training of small networks may be trapped in local optima.', 'In this paper, instead of pruning or distilling over-parameterized models to compressive ones, we propose a new approach based on \\\\\\\\emph{differential inclusions of inverse scale spaces}, that generates a family of models from simple to complex ones by coupling gradient descent and mirror descent to explore model structural sparsity.', 'It has a simple discretization, called the Split Linearized Bregman Iteration (SplitLBI), whose global convergence analysis in deep learning is established that from any initializations, algorithmic iterations converge to a critical point of empirical risks.', 'Experimental evidence shows that\\\\\\\\ SplitLBI may achieve state-of-the-art performance in large scale training on ImageNet-2012 dataset etc., while with \\\\\\\\emph{early stopping} it unveils effective subnet architecture with comparable test accuracies to dense models after retraining instead of pruning well-trained ones.']\"" + ] + }, + "execution_count": 130, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[341]" + ] + }, + { + "cell_type": "code", + "execution_count": 365, + "id": "889a5ecb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Over-parameterization is ubiquitous nowadays in training neural networks to benefit both optimization in seeking global optima and generalization in reducing prediction error.However, compressive networks are desired in many real world applications and direct training of small networks may be trapped in local optima.In this paper, instead of pruning or distilling over-parameterized models to compressive ones, we propose a new approach based on , that generates a family of models from simple to complex ones by coupling gradient descent and mirror descent to explore model structural sparsity.It has a simple discretization, called the Split Linearized Bregman Iteration, whose global convergence analysis in deep learning is established that from any initializations, algorithmic iterations converge to a critical point of empirical risks.Experimental evidence shows that SplitLBI may achieve state-of-the-art performance in large scale training on ImageNet-2012 dataset etc., while with it unveils effective subnet architecture with comparable test accuracies to dense models after retraining instead of pruning well-trained ones.'" + ] + }, + "execution_count": 365, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[341]" + ] + }, + { + "cell_type": "code", + "execution_count": 132, + "id": "6f9d229e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['A fundamental question in reinforcement learning is whether model-free algorithms are sample efficient.', 
'Recently, Jin et al. (2018) proposed a Q-learning algorithm with UCB exploration policy, and proved it has nearly optimal regret bound for finite-horizon episodic MDP.', 'In this paper, we adapt Q-learning with UCB-exploration bonus to infinite-horizon MDP with discounted rewards \\\\\\\\emph{without} accessing a generative model.', 'We show that the \\\\\\\\textit{sample complexity of exploration} of our algorithm is bounded by $\\\\\\\\tilde{O}({\\\\\\\\frac{SA}{\\\\\\\\epsilon^2(1-\\\\\\\\gamma)^7}})$.', 'This improves the previously best known result of $\\\\\\\\tilde{O}({\\\\\\\\frac{SA}{\\\\\\\\epsilon^4(1-\\\\\\\\gamma)^8}})$ in this setting achieved by delayed Q-learning (Strehlet al., 2006),, and matches the lower bound in terms of $\\\\\\\\epsilon$ as well as $S$ and $A$ up to logarithmic factors.']\"" + ] + }, + "execution_count": 132, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[616]" + ] + }, + { + "cell_type": "code", + "execution_count": 366, + "id": "8f62c99a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'A fundamental question in reinforcement learning is whether model-free algorithms are sample efficient.Recently, Jin et al. proposed a Q-learning algorithm with UCB exploration policy, and proved it has nearly optimal regret bound for finite-horizon episodic MDP.In this paper, we adapt Q-learning with UCB-exploration bonus to infinite-horizon MDP with discounted rewards accessing a generative model.We show that the of our algorithm is bounded by.This improves the previously best known result of in this setting achieved by delayed Q-learning,, and matches the lower bound in terms of as well as and up to logarithmic factors.'" + ] + }, + "execution_count": 366, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[616]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c1d3dac9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "55e564ef", + "metadata": {}, + "source": [ + "r'\\$[^$]*\\$'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "984e6041", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 134, + "id": "28b0c12f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We study the convergence of gradient descent (GD) and stochastic gradient descent (SGD) for training $L$-hidden-layer linear residual networks (ResNets).', 'We prove that for training deep residual networks with certain linear transformations at input and output layers, which are fixed throughout training, both GD and SGD with zero initialization on all hidden weights can converge to the global minimum of the training loss.', 'Moreover, when specializing to appropriate Gaussian random linear transformations, GD and SGD provably optimize wide enough deep linear ResNets.', 'Compared with the global convergence result of GD for training standard deep linear networks \\\\\\\\citep{du2019width}, our condition on the neural network width is sharper by a factor of $O(\\\\\\\\kappa L)$, where $\\\\\\\\kappa$ denotes the condition number of the covariance matrix of the training data.', 'In addition, for the first time we establish the global convergence of SGD for training deep linear ResNets and prove a linear convergence rate when the global minimum is $0$.']\"" + ] + }, + "execution_count": 134, + "metadata": {}, + 
"output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[82]" + ] + }, + { + "cell_type": "code", + "execution_count": 367, + "id": "99a6af69", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We study the convergence of gradient descent and stochastic gradient descent for training-hidden-layer linear residual networks.We prove that for training deep residual networks with certain linear transformations at input and output layers, which are fixed throughout training, both GD and SGD with zero initialization on all hidden weights can converge to the global minimum of the training loss.Moreover, when specializing to appropriate Gaussian random linear transformations, GD and SGD provably optimize wide enough deep linear ResNets.Compared with the global convergence result of GD for training standard deep linear networks p, our condition on the neural network width is sharper by a factor of, where denotes the condition number of the covariance matrix of the training data.In addition, for the first time we establish the global convergence of SGD for training deep linear ResNets and prove a linear convergence rate when the global minimum is.'" + ] + }, + "execution_count": 367, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[82]" + ] + }, + { + "cell_type": "code", + "execution_count": 136, + "id": "c45aebd4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['One of the mysteries in the success of neural networks is randomly initialized first order methods like gradient descent can achieve zero training loss even though the objective function is non-convex and non-smooth.', 'This paper demystifies this surprising phenomenon for two-layer fully connected ReLU activated neural networks.', 'For an $m$ hidden node shallow neural network with ReLU activation and $n$ training data, we show as long as $m$ is large enough and no two inputs are parallel, randomly initialized gradient descent converges to a globally optimal solution at a linear convergence rate for the quadratic loss function.\\\\n\\\\n', 'Our analysis relies on the following observation: over-parameterization and random initialization jointly restrict every weight vector to be close to its initialization for all iterations, which allows us to exploit a strong convexity-like property to show that gradient descent converges at a global linear rate to the global optimum.', 'We believe these insights are also useful in analyzing deep models and other first order methods.']\"" + ] + }, + "execution_count": 136, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[131]" + ] + }, + { + "cell_type": "code", + "execution_count": 368, + "id": "5311e012", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'One of the mysteries in the success of neural networks is randomly initialized first order methods like gradient descent can achieve zero training loss even though the objective function is non-convex and non-smooth.This paper demystifies this surprising phenomenon for two-layer fully connected ReLU activated neural networks.For an hidden node shallow neural network with ReLU activation and training data, we show as long as is large enough and no two inputs are parallel, randomly initialized gradient descent converges to a globally optimal solution at a linear convergence rate for the quadratic loss function.Our analysis relies on the following 
observation: over-parameterization and random initialization jointly restrict every weight vector to be close to its initialization for all iterations, which allows us to exploit a strong convexity-like property to show that gradient descent converges at a global linear rate to the global optimum.We believe these insights are also useful in analyzing deep models and other first order methods.'" + ] + }, + "execution_count": 368, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[131]" + ] + }, + { + "cell_type": "code", + "execution_count": 138, + "id": "6a4f3a57", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['In this paper we present the first freely available dataset for the development and evaluation of domain adaptation methods, for the sound event detection task.', 'The dataset contains 40 log mel-band energies extracted from $100$ different synthetic sound event tracks, with additive noise from nine different acoustic scenes (from indoor, outdoor, and vehicle environments), mixed at six different sound-to-noise ratios, SNRs, (from -12 to -27 dB with a step of -3 dB), and totaling to 5400 (9 * 100 * 6) sound files and a total length of 30 564 minutes.', 'We provide the dataset as is, the code to re-create the dataset and remix the sound event tracks and the acoustic scenes with different SNRs, and a baseline method that tests the adaptation performance with the proposed dataset and establishes some first results.']\"" + ] + }, + "execution_count": 138, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[356]" + ] + }, + { + "cell_type": "code", + "execution_count": 369, + "id": "aea9a398", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'In this paper we present the first freely available dataset for the development and evaluation of domain adaptation methods, for the sound event detection task.The dataset contains 40 log mel-band energies extracted from different synthetic sound event tracks, with additive noise from nine different acoustic scenes, mixed at six different sound-to-noise ratios, SNRs,, and totaling to 5400 sound files and a total length of 30 564 minutes.We provide the dataset as is, the code to re-create the dataset and remix the sound event tracks and the acoustic scenes with different SNRs, and a baseline method that tests the adaptation performance with the proposed dataset and establishes some first results.'" + ] + }, + "execution_count": 369, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[356]" + ] + }, + { + "cell_type": "code", + "execution_count": 140, + "id": "d9ac4598", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['A fundamental question in reinforcement learning is whether model-free algorithms are sample efficient.', 'Recently, Jin et al. 
(2018) proposed a Q-learning algorithm with UCB exploration policy, and proved it has nearly optimal regret bound for finite-horizon episodic MDP.', 'In this paper, we adapt Q-learning with UCB-exploration bonus to infinite-horizon MDP with discounted rewards \\\\\\\\emph{without} accessing a generative model.', 'We show that the \\\\\\\\textit{sample complexity of exploration} of our algorithm is bounded by $\\\\\\\\tilde{O}({\\\\\\\\frac{SA}{\\\\\\\\epsilon^2(1-\\\\\\\\gamma)^7}})$.', 'This improves the previously best known result of $\\\\\\\\tilde{O}({\\\\\\\\frac{SA}{\\\\\\\\epsilon^4(1-\\\\\\\\gamma)^8}})$ in this setting achieved by delayed Q-learning (Strehlet al., 2006),, and matches the lower bound in terms of $\\\\\\\\epsilon$ as well as $S$ and $A$ up to logarithmic factors.']\"" + ] + }, + "execution_count": 140, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[616]" + ] + }, + { + "cell_type": "code", + "execution_count": 370, + "id": "8eca80ee", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'A fundamental question in reinforcement learning is whether model-free algorithms are sample efficient.Recently, Jin et al. proposed a Q-learning algorithm with UCB exploration policy, and proved it has nearly optimal regret bound for finite-horizon episodic MDP.In this paper, we adapt Q-learning with UCB-exploration bonus to infinite-horizon MDP with discounted rewards accessing a generative model.We show that the of our algorithm is bounded by.This improves the previously best known result of in this setting achieved by delayed Q-learning,, and matches the lower bound in terms of as well as and up to logarithmic factors.'" + ] + }, + "execution_count": 370, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[616]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4fc58bec", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "c6f20983", + "metadata": {}, + "source": [ + "\\\\\\\\" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0fe27eaf", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 142, + "id": "7942be02", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Creating a knowledge base that is accurate, up-to-date and complete remains a significant challenge despite substantial efforts in automated knowledge base construction. ', 'In this paper, we present Alexandria -- a system for unsupervised, high-precision knowledge base construction.', 'Alexandria uses a probabilistic program to define a process of converting knowledge base facts into unstructured text. 
', 'Using probabilistic inference, we can invert this program and so retrieve facts, schemas and entities from web text.', 'The use of a probabilistic program allows uncertainty in the text to be propagated through to the retrieved facts, which increases accuracy and helps merge facts from multiple sources.', 'Because Alexandria does not require labelled training data, knowledge bases can be constructed with the minimum of manual input.', 'We demonstrate this by constructing a high precision (typically 97\\\\\\\\%+) knowledge base for people from a single seed fact.']\"" + ] + }, + "execution_count": 142, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[233]" + ] + }, + { + "cell_type": "code", + "execution_count": 371, + "id": "b54d492b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Creating a knowledge base that is accurate, up-to-date and complete remains a significant challenge despite substantial efforts in automated knowledge base construction. In this paper, we present Alexandria -- a system for unsupervised, high-precision knowledge base construction.Alexandria uses a probabilistic program to define a process of converting knowledge base facts into unstructured text. Using probabilistic inference, we can invert this program and so retrieve facts, schemas and entities from web text.The use of a probabilistic program allows uncertainty in the text to be propagated through to the retrieved facts, which increases accuracy and helps merge facts from multiple sources.Because Alexandria does not require labelled training data, knowledge bases can be constructed with the minimum of manual input.We demonstrate this by constructing a high precision knowledge base for people from a single seed fact.'" + ] + }, + "execution_count": 371, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[233]" + ] + }, + { + "cell_type": "code", + "execution_count": 144, + "id": "c370282b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'There has been recent interest in improving performance of simple models for multiple reasons such as interpretability, robust learning from small data, deployment in memory constrained settings as well as environmental considerations.\\', \\'In this paper, we propose a novel method SRatio that can utilize information from high performing complex models (viz. 
deep neural networks, boosted trees, random forests) to reweight a training dataset for a potentially low performing simple model such as a decision tree or a shallow network enhancing its performance.\\', \"Our method also leverages the per sample hardness estimate of the simple model which is not the case with the prior works which primarily consider the complex model\\'s confidences/predictions and is thus conceptually novel.\", \\'Moreover, we generalize and formalize the concept of attaching probes to intermediate layers of a neural network, which was one of the main ideas in previous work \\\\\\\\citep{profweight}, to other commonly used classifiers and incorporate this into our method.\\', \\'The benefit of these contributions is witnessed in the experiments where on 6 UCI datasets and CIFAR-10 we outperform competitors in a majority (16 out of 27) of the cases and tie for best performance in the remaining cases.\\', \"In fact, in a couple of cases, we even approach the complex model\\'s performance.\", \\'We also conduct further experiments to validate assertions and intuitively understand why our method works.\\', \\'Theoretically, we motivate our approach by showing that the weighted loss minimized by simple models using our weighting upper bounds the loss of the complex model.\\']'" + ] + }, + "execution_count": 144, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[595]" + ] + }, + { + "cell_type": "code", + "execution_count": 375, + "id": "4720acd4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'There has been recent interest in improving performance of simple models for multiple reasons such as interpretability, robust learning from small data, deployment in memory constrained settings as well as environmental considerations.In this paper, we propose a novel method SRatio that can utilize information from high performing complex models to reweight a training dataset for a potentially low performing simple model such as a decision tree or a shallow network enhancing its performance.Our method also leverages the per sample hardness estimate of the simple model which is not the case with the prior works which primarily consider the complex models confidences/predictions and is thus conceptually novel.Moreover, we generalize and formalize the concept of attaching probes to intermediate layers of a neural network, which was one of the main ideas in previous work p, to other commonly used classifiers and incorporate this into our method.The benefit of these contributions is witnessed in the experiments where on 6 UCI datasets and CIFAR-10 we outperform competitors in a majority of the cases and tie for best performance in the remaining cases.In fact, in a couple of cases, we even approach the complex models performance.We also conduct further experiments to validate assertions and intuitively understand why our method works.Theoretically, we motivate our approach by showing that the weighted loss minimized by simple models using our weighting upper bounds the loss of the complex model.'" + ] + }, + "execution_count": 375, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.Clean_Text[595] " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3aeb37ee", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "17fba94f", + "metadata": {}, + "source": [ + "### Remove TexT -- > VaL" + ] + }, + { + "cell_type": 
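"markdown", + "id": "ad0c0de1", + "metadata": {}, + "source": [ + "The cells below strip parenthesised `(...)` spans, `{...}` groups, inline `$...$` math, literal `\\n` markers and LaTeX commands from `Clean_Text` by looping over `iterrows` with a NUL placeholder. Writing through `iterrows` rows is not guaranteed to reach the frame, so a column-wise form is also more robust; a minimal vectorized sketch of the same cleanup is added here for reference (assuming pandas `Series.str.replace` with `regex=True`; the `clean_column` helper and this cell are illustrative additions, not part of the recorded pipeline):" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ad0c0de2", + "metadata": {}, + "outputs": [], + "source": [ + "# Hedged sketch: the same removals as the loop-based cells below, done column-wise.\n", + "# clean_column is an illustrative helper; the assignment at the end is left commented\n", + "# out so the recorded outputs in this notebook stay untouched.\n", + "def clean_column(s):\n", + "    patterns = [\n", + "        r'\\([^)]*\\)',      # (...) spans\n", + "        r'\\{[^}]*\\}',      # {...} spans\n", + "        r'\\$[^$]*\\$',      # $...$ inline math\n", + "        r'\\\\n',            # literal \\n markers left in the raw strings\n", + "        r'\\\\+[a-zA-Z]+',   # LaTeX commands such as \\\\emph, \\\\textit, \\\\cite\n", + "    ]\n", + "    for pat in patterns:\n", + "        s = s.str.replace(pat, '', regex=True)\n", + "    return s.str.replace(r'\\s{2,}', ' ', regex=True).str.strip()\n", + "\n", + "# NN_SciTLDR_dev['Clean_Text'] = clean_column(NN_SciTLDR_dev['Clean_Text'])\n" + ] + }, + { + "cell_type": 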
"code", + "execution_count": null, + "id": "1932d2dd", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 243, + "id": "43ff8022", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\([^)]*\\)')\n", + "\n", + "for index, row in NN_SciTLDR_dev.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\\x00',''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 244, + "id": "b0043bc1", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\{[^}]*\\}')\n", + "\n", + "for index, row in NN_SciTLDR_dev.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 245, + "id": "5ad8e39e", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\$[^$]*\\$')\n", + "\n", + "for index, row in NN_SciTLDR_dev.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 246, + "id": "4f716895", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\\\n')\n", + "\n", + "for index, row in NN_SciTLDR_dev.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 247, + "id": "7ea44323", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\', \\'')\n", + "\n", + "for index, row in NN_SciTLDR_dev.iterrows(): \n", + " row['Clean_Text'] = pattern.sub(r'\\0',row['Clean_Text'])\n", + "\n", + " \n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\\x00','')) " + ] + }, + { + "cell_type": "code", + "execution_count": 248, + "id": "227c5a79", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\\']','@]'))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('[\\'','@['))\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\"]','@]'))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('[\"','@['))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 249, + "id": "ba46b743", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Text\"] = 
NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('@[',''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 250, + "id": "b1630cfa", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('.\\', \\'','.'))" + ] + }, + { + "cell_type": "code", + "execution_count": 359, + "id": "ebafe466", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('``',''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0414840f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 358, + "id": "01c7f5da", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\cite\",''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\emph\",''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\t\",''))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\textit\",''))\n", + "\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('\\\\\\\\%','%'))\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace('extit','@'))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\@\",''))\n", + "\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\"\\\\\\\\\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 373, + "id": "504d6b97", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\".\\', \\\"\",'.'))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\".\\\", \\'\",'.'))\n", + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(\"\\'\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 374, + "id": "1d7c97e1", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Text\"] = NN_SciTLDR_dev[\"Clean_Text\"].apply(lambda x: x.replace(' ',' '))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 575, + "id": "db304d58", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "793ba9c5", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5d9ae3c5", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 377, + "id": "00626716", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(1970, 6)" + ] + }, + "execution_count": 377, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 378, + "id": "d5d045e7", + 
"metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(618, 6)" + ] + }, + "execution_count": 378, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 379, + "id": "045e0e80", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(613, 6)" + ] + }, + "execution_count": 379, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4bee3b22", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "95abc9ff", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "markdown", + "id": "003d47c2", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8aa0a33e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "933b9f16", + "metadata": {}, + "source": [ + "## SummarY" + ] + }, + { + "cell_type": "markdown", + "id": "f996817f", + "metadata": {}, + "source": [ + "### NN_SciTLDR_train" + ] + }, + { + "cell_type": "code", + "execution_count": 403, + "id": "1229cad0", + "metadata": {}, + "outputs": [], + "source": [ + "# pattern = re.compile(r'\\\\\\\\')\n", + "# df = NN_SciTLDR_train[NN_SciTLDR_train['Clean_Summary'].str.contains(pattern)]\n", + "# df" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21030bc2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 404, + "id": "9de7b1ff", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['A simple modification to low-rank factorization that improves performances (in both image and language tasks) while still being compact.']\"" + ] + }, + "execution_count": 404, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[5]" + ] + }, + { + "cell_type": "code", + "execution_count": 434, + "id": "cc5b66cb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'A simple modification to low-rank factorization that improves performances (in both image and language tasks) while still being compact.'" + ] + }, + "execution_count": 434, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[5]" + ] + }, + { + "cell_type": "code", + "execution_count": 406, + "id": "282410fe", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We propose the Neuro-Symbolic Concept Learner (NS-CL), a model that learns visual concepts, words, and semantic parsing of sentences without explicit supervision on any of them.']\"" + ] + }, + "execution_count": 406, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[78]" + ] + }, + { + "cell_type": "code", + "execution_count": 435, + "id": "45561dc1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We propose the Neuro-Symbolic Concept Learner (NS-CL), a model that learns visual concepts, words, and semantic parsing of sentences without explicit supervision on any of them.'" + ] + }, + "execution_count": 435, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[78]" + ] + }, + { + "cell_type": "code", + "execution_count": 408, + "id": 
"a7418bbc", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We proposed a unified Generative Adversarial Networks (GAN) framework to learn noise-aware knowledge graph embedding.']\"" + ] + }, + "execution_count": 408, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1985]" + ] + }, + { + "cell_type": "code", + "execution_count": 436, + "id": "11d3e2c9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We proposed a unified Generative Adversarial Networks (GAN) framework to learn noise-aware knowledge graph embedding.'" + ] + }, + "execution_count": 436, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1985]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9ef3eff0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 410, + "id": "0708fc5a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['It is shown that ResNet-type CNNs are a universal approximator and its expression ability is not worse than fully connected neural networks (FNNs) with a \\\\\\\\textit{block-sparse} structure even if the size of each layer in the CNN is fixed.']\"" + ] + }, + "execution_count": 410, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[593]" + ] + }, + { + "cell_type": "code", + "execution_count": 439, + "id": "5af953a1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'It is shown that ResNet-type CNNs are a universal approximator and its expression ability is not worse than fully connected neural networks (FNNs) with a structure even if the size of each layer in the CNN is fixed.'" + ] + }, + "execution_count": 439, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[593]" + ] + }, + { + "cell_type": "code", + "execution_count": 412, + "id": "8d5b613c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['A novel encoding scheme of using {-1, +1} to decompose QNNs into multi-branch binary networks, in which we used bitwise operations (xnor and bitcount) to achieve model compression, computational acceleration and resource saving. ']\"" + ] + }, + "execution_count": 412, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1047]" + ] + }, + { + "cell_type": "code", + "execution_count": 440, + "id": "5eb580a9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'A novel encoding scheme of using to decompose QNNs into multi-branch binary networks, in which we used bitwise operations (xnor and bitcount) to achieve model compression, computational acceleration and resource saving. '" + ] + }, + "execution_count": 440, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1047]" + ] + }, + { + "cell_type": "code", + "execution_count": 414, + "id": "f3a64744", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['This paper improves the quality of the recently proposed adversarial feature leaning (AFL) approach for incorporating explicit constrains to representations, by introducing the concept of the {\\\\\\\\em vulnerableness} of the adversary. 
']\"" + ] + }, + "execution_count": 414, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1661]" + ] + }, + { + "cell_type": "code", + "execution_count": 441, + "id": "a381cbff", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'This paper improves the quality of the recently proposed adversarial feature leaning (AFL) approach for incorporating explicit constrains to representations, by introducing the concept of the of the adversary. '" + ] + }, + "execution_count": 441, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1661]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9f3e1933", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 416, + "id": "f9a27a2b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We compare deep model-based and model-free RL algorithms by studying the approximability of $Q$-functions, policies, and dynamics by neural networks. ']\"" + ] + }, + "execution_count": 416, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[299]" + ] + }, + { + "cell_type": "code", + "execution_count": 442, + "id": "7e10bef1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We compare deep model-based and model-free RL algorithms by studying the approximability of-functions, policies, and dynamics by neural networks. '" + ] + }, + "execution_count": 442, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[299]" + ] + }, + { + "cell_type": "code", + "execution_count": 418, + "id": "f8e8e51f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We consider tackling a single-agent RL problem by distributing it to $n$ learners.']\"" + ] + }, + "execution_count": 418, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1246]" + ] + }, + { + "cell_type": "code", + "execution_count": 443, + "id": "62ef638e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We consider tackling a single-agent RL problem by distributing it to learners.'" + ] + }, + "execution_count": 443, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1246]" + ] + }, + { + "cell_type": "code", + "execution_count": 420, + "id": "15eab52a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We introduce a method to train models with provable robustness wrt all the $l_p$-norms for $p\\\\\\\\geq 1$ simultaneously.']\"" + ] + }, + "execution_count": 420, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1543]" + ] + }, + { + "cell_type": "code", + "execution_count": 444, + "id": "3ce5022e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We introduce a method to train models with provable robustness wrt all the-norms for simultaneously.'" + ] + }, + "execution_count": 444, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1543]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6fd71fa6", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": 
"code", + "execution_count": 422, + "id": "a742fe8f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We revisit the simple idea of pruning connections of DNNs through $\\\\\\\\ell_1$ regularization achieving state-of-the-art results on multiple datasets with theoretic guarantees.']\"" + ] + }, + "execution_count": 422, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[928]" + ] + }, + { + "cell_type": "code", + "execution_count": 445, + "id": "3165c929", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We revisit the simple idea of pruning connections of DNNs through regularization achieving state-of-the-art results on multiple datasets with theoretic guarantees.'" + ] + }, + "execution_count": 445, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[928]" + ] + }, + { + "cell_type": "code", + "execution_count": 424, + "id": "e127b79a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Higher momentum parameter $\\\\\\\\beta$ helps for escaping saddle points faster']\"" + ] + }, + "execution_count": 424, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1316]" + ] + }, + { + "cell_type": "code", + "execution_count": 446, + "id": "ccd5779f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Higher momentum parameter helps for escaping saddle points faster'" + ] + }, + "execution_count": 446, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1316]" + ] + }, + { + "cell_type": "code", + "execution_count": 426, + "id": "c36f8a28", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['This paper improves the quality of the recently proposed adversarial feature leaning (AFL) approach for incorporating explicit constrains to representations, by introducing the concept of the {\\\\\\\\em vulnerableness} of the adversary. ']\"" + ] + }, + "execution_count": 426, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1661]" + ] + }, + { + "cell_type": "code", + "execution_count": 447, + "id": "9a4d1cad", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'This paper improves the quality of the recently proposed adversarial feature leaning (AFL) approach for incorporating explicit constrains to representations, by introducing the concept of the of the adversary. 
'" + ] + }, + "execution_count": 447, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Clean_Summary'].loc[1661]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d96c6c1", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "def23618", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 438, + "id": "7b043008", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 606, + "id": "5819dc8e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 428, + "id": "9392eed3", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('\\']','@]'))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('[\\'','@['))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('\"]','@]'))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('[\"','@['))\n", + "\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('\\', \\'',''))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('@',''))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('\\\\\\'s','\\'s'))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 429, + "id": "6995c17d", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\$[^$]*\\$')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Summary'] = pattern.sub(r'\\0',row['Clean_Summary'])\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 430, + "id": "31419de9", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\{[^}]*\\}')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Summary'] = pattern.sub(r'\\0',row['Clean_Summary'])\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 476, + "id": "e00a5f6b", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\\\n')\n", + "\n", + "for index, row in NN_SciTLDR_train.iterrows(): \n", + " row['Clean_Summary'] = 
pattern.sub(r'\\0',row['Clean_Summary'])\n", + "\n", + " \n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a1b45c29", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c45be4a4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "002844c5", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace(\"\\\\\\\\textit\",''))\n", + "\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace('extit','@'))\n", + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace(\"\\\\@\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2fac57e1", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 433, + "id": "f21d453c", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train[\"Clean_Summary\"] = NN_SciTLDR_train[\"Clean_Summary\"].apply(lambda x: x.replace(' ',' '))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b6a3c70d", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a28c4902", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ac366532", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "ccbfaf51", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5bdef7a4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "0347701d", + "metadata": {}, + "source": [ + "### NN_SciTLDR_test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "86677e09", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 477, + "id": "55aa924b", + "metadata": {}, + "outputs": [], + "source": [ + "# pattern = re.compile(r'\\\\\\\\')\n", + "# df = NN_SciTLDR_test[NN_SciTLDR_test['Clean_Summary'].str.contains(pattern)]\n", + "# df" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d12bd2af", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 449, + "id": "f906f9b6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We introduce Mol-CycleGAN - a new generative model for optimization of molecules to augment drug design.', 'The paper presents an approach for optimizing molecular properties based on the application of CycleGANs to variational autoencoders for molecules and employs a domain-specific VAE called Junction Tree VAE (JT-VAE).', 'This paper uses a variational autoencoders to learn a translation function, from the set of molecules without the interested property to the set of molecules with the property. 
']\"" + ] + }, + "execution_count": 449, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[24]" + ] + }, + { + "cell_type": "code", + "execution_count": 486, + "id": "0b958a18", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We introduce Mol-CycleGAN - a new generative model for optimization of molecules to augment drug design.The paper presents an approach for optimizing molecular properties based on the application of CycleGANs to variational autoencoders for molecules and employs a domain-specific VAE called Junction Tree VAE (JT-VAE).This paper uses a variational autoencoders to learn a translation function, from the set of molecules without the interested property to the set of molecules with the property. '" + ] + }, + "execution_count": 486, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[24]" + ] + }, + { + "cell_type": "code", + "execution_count": 451, + "id": "c578fefb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We present a simple and general method to train a single neural network executable at different widths (number of channels in a layer), permitting instant and adaptive accuracy-efficiency trade-offs at runtime.', 'The paper proposes an idea of combining different size models together into one shared net, greatly improving performance for detection', 'This paper trains a single network executable at different widths.']\"" + ] + }, + "execution_count": 451, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[584]" + ] + }, + { + "cell_type": "code", + "execution_count": 487, + "id": "6eb51158", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We present a simple and general method to train a single neural network executable at different widths (number of channels in a layer), permitting instant and adaptive accuracy-efficiency trade-offs at runtime.The paper proposes an idea of combining different size models together into one shared net, greatly improving performance for detectionThis paper trains a single network executable at different widths.'" + ] + }, + "execution_count": 487, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[584]" + ] + }, + { + "cell_type": "code", + "execution_count": 453, + "id": "84c2de26", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['Decoding the last token in the context using the predicted next token distribution acts as a regularizer and improves language modeling.', 'The authors introduce the idea of past decoding for the purpose of regularization for improved perplexity on Penn Treebank', 'Proposes an additional loss term to use when training an LSTM LM and shows that by adding this loss term they can achieve SOTA perplexity on a number of LM benchmarks.', 'Suggests a new regularization technique which can be added on top of those used in AWD-LSTM of Merity et al. 
(2017) with little overhead.']\"" + ] + }, + "execution_count": 453, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[610]" + ] + }, + { + "cell_type": "code", + "execution_count": 488, + "id": "dfe5e36f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Decoding the last token in the context using the predicted next token distribution acts as a regularizer and improves language modeling.The authors introduce the idea of past decoding for the purpose of regularization for improved perplexity on Penn TreebankProposes an additional loss term to use when training an LSTM LM and shows that by adding this loss term they can achieve SOTA perplexity on a number of LM benchmarks.Suggests a new regularization technique which can be added on top of those used in AWD-LSTM of Merity et al. (2017) with little overhead.'" + ] + }, + "execution_count": 488, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[610]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4632364a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 455, + "id": "45e0acd2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'We look at SGD as a trajectory in the space of probability measures, show its connection to Markov processes, propose a simple Markov model of SGD learning, and experimentally compare it with SGD using information theoretic quantities. \\', \\'Constructs a Markov chain that follows a shorted path in TV metric on P and shows that trajectories of SGD and \\\\\\\\alpha-SMLC have similar conditional entropy\\', \\'Studies the trajectory of H(\\\\\\\\hat{y}) versus H(\\\\\\\\hat{y}|y) on the information plane for stochastic gradient descent methods for training neural networks\\', \"Describes SGD from the point of view of the distribution p(y\\',y) where y is (a possibly corrupted) true class-label and y\\' a model prediction.\"]'" + ] + }, + "execution_count": 455, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[53]" + ] + }, + { + "cell_type": "code", + "execution_count": 505, + "id": "f01ff46d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"We look at SGD as a trajectory in the space of probability measures, show its connection to Markov processes, propose a simple Markov model of SGD learning, and experimentally compare it with SGD using information theoretic quantities. 
Constructs a Markov chain that follows a shorted path in TV metric on P and shows that trajectories of SGD and alpha-SMLC have similar conditional entropyStudies the trajectory of H(hat) versus H(hat|y) on the information plane for stochastic gradient descent methods for training neural networks Describes SGD from the point of view of the distribution p(y',y) where y is (a possibly corrupted) true class-label and y' a model prediction.\"" + ] + }, + "execution_count": 505, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[53]" + ] + }, + { + "cell_type": "code", + "execution_count": 457, + "id": "b44dba96", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['we propose a regularizer that improves the classification performance of neural networks', 'the authors propose to train a model from a point of maximizing mutual information between the predictions and the true outputs, with a regularization term that minimizes irrelevant information while learning.', 'Proposes to decompose the parameters into an invertible feature map F and a linear transformation w in the last layer to maximize mutual information I(Y, \\\\\\\\hat{T}) while constraining irrelevant information']\"" + ] + }, + "execution_count": 457, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[290]" + ] + }, + { + "cell_type": "code", + "execution_count": 490, + "id": "e997d204", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'we propose a regularizer that improves the classification performance of neural networksthe authors propose to train a model from a point of maximizing mutual information between the predictions and the true outputs, with a regularization term that minimizes irrelevant information while learning.Proposes to decompose the parameters into an invertible feature map F and a linear transformation w in the last layer to maximize mutual information I(Y, \\\\\\\\hat) while constraining irrelevant information'" + ] + }, + "execution_count": 490, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[290]" + ] + }, + { + "cell_type": "code", + "execution_count": 459, + "id": "a5320885", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We created a new dataset for data interpretation over plots and also propose a baseline for the same.', 'The authors propose a pipeline to solve the DIP problem involving learning from datasets containing triplets of the form {plot, question, answer}', 'Proposes an algorithm that can interpret data shown in scientific plots.']\"" + ] + }, + "execution_count": 459, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[465]" + ] + }, + { + "cell_type": "code", + "execution_count": 491, + "id": "efa646cd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We created a new dataset for data interpretation over plots and also propose a baseline for the same.The authors propose a pipeline to solve the DIP problem involving learning from datasets containing triplets of the formProposes an algorithm that can interpret data shown in scientific plots.'" + ] + }, + "execution_count": 491, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[465]" + ] + }, + { + "cell_type": "code", + "execution_count": 
null, + "id": "68add268", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 461, + "id": "44f620fd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We prove that parallel local SGD achieves linear speedup with much lesser communication than parallel mini-batch SGD.', 'Provides a convergence proof for local SGD, and proves that local SGD can provide the same speedup gains as minibatch, but may be able to communicate significantly less.', 'This paper presents an analysis of local SGD and bounds on how frequent the estimators obtained by running SGD required to be averaged in order to yield linear parallelization speedups.', 'The authors analyze the local SGD algorithm, where $K$ parallel chains of SGD are run, and the iterates are occasionally synchronized across machines by averaging']\"" + ] + }, + "execution_count": 461, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[117]" + ] + }, + { + "cell_type": "code", + "execution_count": 492, + "id": "de194221", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We prove that parallel local SGD achieves linear speedup with much lesser communication than parallel mini-batch SGD.Provides a convergence proof for local SGD, and proves that local SGD can provide the same speedup gains as minibatch, but may be able to communicate significantly less.This paper presents an analysis of local SGD and bounds on how frequent the estimators obtained by running SGD required to be averaged in order to yield linear parallelization speedups.The authors analyze the local SGD algorithm, where parallel chains of SGD are run, and the iterates are occasionally synchronized across machines by averaging'" + ] + }, + "execution_count": 492, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[117]" + ] + }, + { + "cell_type": "code", + "execution_count": 463, + "id": "11c0195a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'Driven by the need for parallelizable, open-loop hyperparameter optimization methods, we propose the use of $k$-determinantal point processes in hyperparameter optimization via random search.\\', \\'Proposes using the k-DPP to select candidate points in hyperparameter searches.\\', \\'The authors propose k-DPP as an open loop method for hyperparameter optimization and provide its empirical study and comparison with other methods.\\', \"Considers non-sequential and uninformed hyperparameter search using determinantal point processes, which are probability distributions over subsets of a ground set with the property that subsets with more \\'diverse\\' elements haev higher probability\"]'" + ] + }, + "execution_count": 463, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[278]" + ] + }, + { + "cell_type": "code", + "execution_count": 504, + "id": "21cf0634", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"Driven by the need for parallelizable, open-loop hyperparameter optimization methods, we propose the use of-determinantal point processes in hyperparameter optimization via random search.Proposes using the k-DPP to select candidate points in hyperparameter searches.The authors propose k-DPP as an open loop method for hyperparameter optimization and provide its empirical study and comparison with other methods. 
Considers non-sequential and uninformed hyperparameter search using determinantal point processes, which are probability distributions over subsets of a ground set with the property that subsets with more 'diverse' elements haev higher probability\"" + ] + }, + "execution_count": 504, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[278]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2b1e4258", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 465, + "id": "ca80f01e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We train a neural network to output approximately optimal weights as a function of hyperparameters.', '\\\\nHyper-networks for hyper-parameter optimization in neural networks.']\"" + ] + }, + "execution_count": 465, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[344]" + ] + }, + { + "cell_type": "code", + "execution_count": 494, + "id": "31558e72", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We train a neural network to output approximately optimal weights as a function of hyperparameters.Hyper-networks for hyper-parameter optimization in neural networks.'" + ] + }, + "execution_count": 494, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[344]" + ] + }, + { + "cell_type": "code", + "execution_count": 467, + "id": "c14ae6ce", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We study the state equation of a recurrent neural network. We show that SGD can efficiently learn the unknown dynamics from few input/output observations under proper assumptions.', 'The paper studies discrete-time dynamical systems with a non-linear state equation, proving that running SGD on a fixed-length trajectory gives logarithmic convergence.', 'This work considers the problem of learning a non-linear dynamical system in which the output equals the state. ', '\\\\nThis paper studies the ability of SGD to learn dynamics of a linear system and non-linear activation.']\"" + ] + }, + "execution_count": 467, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[582]" + ] + }, + { + "cell_type": "code", + "execution_count": 495, + "id": "ca6eafd5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We study the state equation of a recurrent neural network. 
We show that SGD can efficiently learn the unknown dynamics from few input/output observations under proper assumptions.The paper studies discrete-time dynamical systems with a non-linear state equation, proving that running SGD on a fixed-length trajectory gives logarithmic convergence.This work considers the problem of learning a non-linear dynamical system in which the output equals the state.This paper studies the ability of SGD to learn dynamics of a linear system and non-linear activation.'" + ] + }, + "execution_count": 495, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[582]" + ] + }, + { + "cell_type": "code", + "execution_count": 469, + "id": "580beaed", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['An algorithm for training neural networks efficiently on temporally redundant data.', 'The paper describes a neural coding scheme for spike based learning in deep neural networks', '\\\\nThis paper presents a method for spike based learning that aims at reducing the needed computation during learning and testing when classifying temporal redundant data.', 'This paper applies a predictive coding version of the Sigma-Delta encoding scheme to reduce a computational load on a deep learning network, combining the three components in a way not seen previously.']\"" + ] + }, + "execution_count": 469, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[555]" + ] + }, + { + "cell_type": "code", + "execution_count": 496, + "id": "3dca84a9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'An algorithm for training neural networks efficiently on temporally redundant data.The paper describes a neural coding scheme for spike based learning in deep neural networksThis paper presents a method for spike based learning that aims at reducing the needed computation during learning and testing when classifying temporal redundant data.This paper applies a predictive coding version of the Sigma-Delta encoding scheme to reduce a computational load on a deep learning network, combining the three components in a way not seen previously.'" + ] + }, + "execution_count": 496, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[555]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8eb5be46", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 471, + "id": "81e00207", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'We look at SGD as a trajectory in the space of probability measures, show its connection to Markov processes, propose a simple Markov model of SGD learning, and experimentally compare it with SGD using information theoretic quantities. 
\\', \\'Constructs a Markov chain that follows a shorted path in TV metric on P and shows that trajectories of SGD and \\\\\\\\alpha-SMLC have similar conditional entropy\\', \\'Studies the trajectory of H(\\\\\\\\hat{y}) versus H(\\\\\\\\hat{y}|y) on the information plane for stochastic gradient descent methods for training neural networks\\', \"Describes SGD from the point of view of the distribution p(y\\',y) where y is (a possibly corrupted) true class-label and y\\' a model prediction.\"]'" + ] + }, + "execution_count": 471, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[53]" + ] + }, + { + "cell_type": "code", + "execution_count": 503, + "id": "a74a91fd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"We look at SGD as a trajectory in the space of probability measures, show its connection to Markov processes, propose a simple Markov model of SGD learning, and experimentally compare it with SGD using information theoretic quantities. Constructs a Markov chain that follows a shorted path in TV metric on P and shows that trajectories of SGD and alpha-SMLC have similar conditional entropyStudies the trajectory of H(hat) versus H(hat|y) on the information plane for stochastic gradient descent methods for training neural networks Describes SGD from the point of view of the distribution p(y',y) where y is (a possibly corrupted) true class-label and y' a model prediction.\"" + ] + }, + "execution_count": 503, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[53]" + ] + }, + { + "cell_type": "code", + "execution_count": 502, + "id": "2f92b0e2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'we propose a regularizer that improves the classification performance of neural networksthe authors propose to train a model from a point of maximizing mutual information between the predictions and the true outputs, with a regularization term that minimizes irrelevant information while learning.Proposes to decompose the parameters into an invertible feature map F and a linear transformation w in the last layer to maximize mutual information I(Y, hat) while constraining irrelevant information'" + ] + }, + "execution_count": 502, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[290]" + ] + }, + { + "cell_type": "code", + "execution_count": 506, + "id": "f6dbf69d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'we propose a regularizer that improves the classification performance of neural networksthe authors propose to train a model from a point of maximizing mutual information between the predictions and the true outputs, with a regularization term that minimizes irrelevant information while learning.Proposes to decompose the parameters into an invertible feature map F and a linear transformation w in the last layer to maximize mutual information I(Y, hat) while constraining irrelevant information'" + ] + }, + "execution_count": 506, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Clean_Summary'].loc[290]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2cba8094", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f21024a4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": 
"code", + "execution_count": 478, + "id": "3201e596", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\']','@]'))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('[\\'','@['))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\"]','@]'))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('[\"','@['))\n", + "\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\', \\'',''))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('@',''))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\\\\\'s','\\'s'))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 479, + "id": "b33ebf0e", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\$[^$]*\\$')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Summary'] = pattern.sub(r'\\0',row['Clean_Summary'])\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 480, + "id": "8aafa229", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\{[^}]*\\}')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Summary'] = pattern.sub(r'\\0',row['Clean_Summary'])\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 481, + "id": "b1ae44f7", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\\\n')\n", + "\n", + "for index, row in NN_SciTLDR_test.iterrows(): \n", + " row['Clean_Summary'] = pattern.sub(r'\\0',row['Clean_Summary'])\n", + "\n", + " \n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c9e836d3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 482, + "id": "85f3192e", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace(\"\\\\\\\\textit\",''))\n", + "\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = 
NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('extit','@'))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace(\"\\\\@\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 499, + "id": "7e871d19", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\', \"',' '))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('\\\\\\\\',''))\n", + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace('.\\', \"','.'))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 501, + "id": "7155fe16", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test[\"Clean_Summary\"] = NN_SciTLDR_test[\"Clean_Summary\"].apply(lambda x: x.replace(' ',' '))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e4be8a6a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "48ff7b55", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "a42ad1b8", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "markdown", + "id": "841ca036", + "metadata": {}, + "source": [ + "### NN_SciTLDR_dev" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11305f63", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 475, + "id": "ca013656", + "metadata": {}, + "outputs": [], + "source": [ + "# pattern = re.compile(r'\\\\\\\\')\n", + "# df = NN_SciTLDR_dev[NN_SciTLDR_dev['Clean_Summary'].str.contains(pattern)]\n", + "# df" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "96121808", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 507, + "id": "f8820d53", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'[\\'Understanding the structure of knowledge graph representation using insight from word embeddings.\\', \"This paper attempts to understand the latent structure underlying knowledge graph embedding methods, and demonstrates that a model\\'s ability to represent a relation type depends on the model architecture\\'s limitations with respect to relation conditions.\", \"This paper proposes a detailed study on the explainability of link prediction (LP) models by utilizing a recent interpretation of word embeddings to provide a better understanding of LPs\\' model performance.\"]'" + ] + }, + "execution_count": 507, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[34]" + ] + }, + { + "cell_type": "code", + "execution_count": 533, + "id": "8719e893", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"Understanding the structure of knowledge graph representation using insight from word embeddings.This paper attempts to understand the latent structure underlying knowledge graph embedding methods, and demonstrates that a model's ability to represent a relation type depends on the model architecture's limitations with respect to relation conditions.This paper proposes a detailed study on the explainability of link prediction (LP) models by utilizing a recent interpretation of word embeddings to provide a better understanding of LPs' model performance.\"" + ] + }, + 
"execution_count": 533, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[34]" + ] + }, + { + "cell_type": "code", + "execution_count": 509, + "id": "7be274b3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We identify some universal patterns (i.e., holding across architectures) in the behavior of different surrogate losses (CE, MSE, 0-1 loss) while training neural networks and present supporting empirical evidence.']\"" + ] + }, + "execution_count": 509, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[618]" + ] + }, + { + "cell_type": "code", + "execution_count": 534, + "id": "20916da1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We identify some universal patterns (i.e., holding across architectures) in the behavior of different surrogate losses (CE, MSE, 0-1 loss) while training neural networks and present supporting empirical evidence.'" + ] + }, + "execution_count": 534, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[618]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eace9a9e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 511, + "id": "886eb6ca", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['This paper deals with stability of simple gradient penalty $\\\\\\\\mu$-WGAN optimization by introducing a concept of measure valued differentiation.', 'WGAN with a squared zero centered gradient penalty term w.r.t. to a general measure is studied.', 'Characterizes the convergence of gradient penalized Wasserstein GAN.']\"" + ] + }, + "execution_count": 511, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[250]" + ] + }, + { + "cell_type": "code", + "execution_count": 535, + "id": "e13be4c4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'This paper deals with stability of simple gradient penalty-WGAN optimization by introducing a concept of measure valued differentiation.WGAN with a squared zero centered gradient penalty term w.r.t. 
to a general measure is studied.Characterizes the convergence of gradient penalized Wasserstein GAN.'" + ] + }, + "execution_count": 535, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[250]" + ] + }, + { + "cell_type": "code", + "execution_count": 513, + "id": "5e452985", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We show that conventional regularization methods (e.g., $L_2$, dropout), which have been largely ignored in RL methods, can be very effective in policy optimization.', 'The authors study a set of existing direct policy optimization methods in the field of reinforcement learning and provide a detailed investigation on the effect of regulations on the performance and behavior of agents following these methods.', 'This paper provides a study on the effect of regularization on performance in training environments in policy optimization methods in multiple continuous control tasks.']\"" + ] + }, + "execution_count": 513, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[257]" + ] + }, + { + "cell_type": "code", + "execution_count": 536, + "id": "30348a5c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We show that conventional regularization methods (e.g.,, dropout), which have been largely ignored in RL methods, can be very effective in policy optimization.The authors study a set of existing direct policy optimization methods in the field of reinforcement learning and provide a detailed investigation on the effect of regulations on the performance and behavior of agents following these methods.This paper provides a study on the effect of regularization on performance in training environments in policy optimization methods in multiple continuous control tasks.'" + ] + }, + "execution_count": 536, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[257]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e69143b9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 515, + "id": "9d16d499", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We release a dataset constructed from single-lead ECG data from 11,000 patients who were prescribed to use the {DEVICENAME}(TM) device.', 'This paper describes a large-scale ECG dataset the authors intend to publish and provides unsupervised analysis and visualization of the dataset.']\"" + ] + }, + "execution_count": 515, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[473]" + ] + }, + { + "cell_type": "code", + "execution_count": 537, + "id": "d715d927", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We release a dataset constructed from single-lead ECG data from 11,000 patients who were prescribed to use the {DEVICENAME}(TM) device.This paper describes a large-scale ECG dataset the authors intend to publish and provides unsupervised analysis and visualization of the dataset.'" + ] + }, + "execution_count": 537, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[473]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c6f84358", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 517, + 
"id": "d1c81c9d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We propose a framework that learns to encode knowledge symbolically and generate programs to reason about the encoded knowledge.', '\\\\nThe authors propose the N-Gram machine to answer questions over long documents.', 'This paper presents the n-gram machine, a model that encodes sentences into simple symbolic representations which can be queried efficiently.']\"" + ] + }, + "execution_count": 517, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[166]" + ] + }, + { + "cell_type": "code", + "execution_count": 538, + "id": "a9223a14", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We propose a framework that learns to encode knowledge symbolically and generate programs to reason about the encoded knowledge.The authors propose the N-Gram machine to answer questions over long documents.This paper presents the n-gram machine, a model that encodes sentences into simple symbolic representations which can be queried efficiently.'" + ] + }, + "execution_count": 538, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[166]" + ] + }, + { + "cell_type": "code", + "execution_count": 519, + "id": "fd967af0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['We propose a novel meta-learning framework for transductive inference that classifies the entire test set at once to alleviate the low-data problem.', '\\\\nThis paper proposes to address few-shot learning in a transductive way by learning a label propagation model in an end-to-end manner, the first to learn label propagation for transductive few-shot learning and produced effective empirical results. ', 'This paper proposes a meta-learning framework that leverages unlabeled data by learning the graph-based label propogation in an end-to-end manner.', 'Studies few-host learning in a transductive setting: using meta learning to learn to propagate labels from training samples to test samples. ']\"" + ] + }, + "execution_count": 519, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[172]" + ] + }, + { + "cell_type": "code", + "execution_count": 539, + "id": "15bd96e6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'We propose a novel meta-learning framework for transductive inference that classifies the entire test set at once to alleviate the low-data problem.This paper proposes to address few-shot learning in a transductive way by learning a label propagation model in an end-to-end manner, the first to learn label propagation for transductive few-shot learning and produced effective empirical results. This paper proposes a meta-learning framework that leverages unlabeled data by learning the graph-based label propogation in an end-to-end manner.Studies few-host learning in a transductive setting: using meta learning to learn to propagate labels from training samples to test samples. 
'" + ] + }, + "execution_count": 539, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[172]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b89ab8e7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 521, + "id": "79029e0e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['This paper deals with stability of simple gradient penalty $\\\\\\\\mu$-WGAN optimization by introducing a concept of measure valued differentiation.', 'WGAN with a squared zero centered gradient penalty term w.r.t. to a general measure is studied.', 'Characterizes the convergence of gradient penalized Wasserstein GAN.']\"" + ] + }, + "execution_count": 521, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[250]" + ] + }, + { + "cell_type": "code", + "execution_count": 540, + "id": "b5fc8492", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'This paper deals with stability of simple gradient penalty-WGAN optimization by introducing a concept of measure valued differentiation.WGAN with a squared zero centered gradient penalty term w.r.t. to a general measure is studied.Characterizes the convergence of gradient penalized Wasserstein GAN.'" + ] + }, + "execution_count": 540, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Clean_Summary'].loc[250]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9eba3b98", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8217b354", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e9c16308", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 522, + "id": "a8f63cec", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('\\']','@]'))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('[\\'','@['))\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('\"]','@]'))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('[\"','@['))\n", + "\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('@]',''))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('@[',''))\n", + "\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('\\', \\'',''))\n", + "\n", + "\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('@',''))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('\\\\\\'s','\\'s'))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 523, + "id": "6272d13b", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = 
re.compile(r'\\$[^$]*\\$')\n", + "\n", + "for index, row in NN_SciTLDR_dev.iterrows(): \n", + " row['Clean_Summary'] = pattern.sub(r'\\0',row['Clean_Summary'])\n", + " \n", + " \n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 524, + "id": "37ab4bc4", + "metadata": {}, + "outputs": [], + "source": [ + "pattern = re.compile(r'\\\\n')\n", + "\n", + "for index, row in NN_SciTLDR_dev.iterrows(): \n", + " row['Clean_Summary'] = pattern.sub(r'\\0',row['Clean_Summary'])\n", + " \n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace(' \\x00',''))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('\\x00',''))" + ] + }, + { + "cell_type": "code", + "execution_count": 525, + "id": "43d57aa4", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace(\"\\\\\\\\textit\",''))\n", + "\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('extit','@'))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace(\"\\\\@\",''))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 531, + "id": "d13adfc8", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('.\\', \"','.'))\n", + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace('.\", \\\"','.'))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 541, + "id": "226d33a0", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev[\"Clean_Summary\"] = NN_SciTLDR_dev[\"Clean_Summary\"].apply(lambda x: x.replace(' ',' '))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eca40271", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "f90c4eaa", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "69b9f1e3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "06ef61cc", + "metadata": {}, + "source": [ + "# SpliT" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "80eaf17f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 542, + "id": "5397ed04", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train['T_Clean_Text'] = NN_SciTLDR_train.apply(lambda x: nltk.tokenize.word_tokenize(x['Clean_Text']), axis=1)\n", + "NN_SciTLDR_train['T_Clean_Summary']= NN_SciTLDR_train.apply(lambda x: nltk.tokenize.word_tokenize(x['Clean_Summary']), axis=1)\n", + "\n", + "NN_SciTLDR_test['T_Clean_Text'] = NN_SciTLDR_test.apply(lambda x: nltk.tokenize.word_tokenize(x['Clean_Text']), axis=1)\n", + "NN_SciTLDR_test['T_Clean_Summary']= NN_SciTLDR_test.apply(lambda x: nltk.tokenize.word_tokenize(x['Clean_Summary']), axis=1)\n", + "\n", + "NN_SciTLDR_dev['T_Clean_Text'] = NN_SciTLDR_dev.apply(lambda x: nltk.tokenize.word_tokenize(x['Clean_Text']), axis=1)\n", + "NN_SciTLDR_dev['T_Clean_Summary']= NN_SciTLDR_dev.apply(lambda x: 
nltk.tokenize.word_tokenize(x['Clean_Summary']), axis=1)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "da5ec0ce", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "aacc72d9", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a3c670d4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 543, + "id": "d331d211", + "metadata": {}, + "outputs": [], + "source": [ + "Len_SciTLDR_train_Text=[]\n", + "for index, row in NN_SciTLDR_train.iterrows():\n", + " Len_SciTLDR_train_Text.append(len( row['T_Clean_Text']))\n", + "\n", + "\n", + "Len_SciTLDR_train_Summary=[]\n", + "for index, row in NN_SciTLDR_train.iterrows():\n", + " Len_SciTLDR_train_Summary.append(len( row['T_Clean_Summary']))\n", + "\n", + "#############################################################################################\n", + "\n", + "Len_SciTLDR_test_Text=[]\n", + "for index, row in NN_SciTLDR_test.iterrows():\n", + " Len_SciTLDR_test_Text.append(len( row['T_Clean_Text']))\n", + "\n", + "\n", + "Len_SciTLDR_test_Summary=[]\n", + "for index, row in NN_SciTLDR_test.iterrows():\n", + " Len_SciTLDR_test_Summary.append(len( row['T_Clean_Summary']))\n", + "\n", + "##############################################################################################\n", + "\n", + "Len_SciTLDR_Val_Text=[]\n", + "for index, row in NN_SciTLDR_dev.iterrows():\n", + " Len_SciTLDR_Val_Text.append(len( row['T_Clean_Text']))\n", + "\n", + "\n", + "Len_SciTLDR_Val_Summary=[]\n", + "for index, row in NN_SciTLDR_dev.iterrows():\n", + " Len_SciTLDR_Val_Summary.append(len( row['T_Clean_Summary']))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "02a4da33", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a08654bd", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 544, + "id": "abd8c48f", + "metadata": {}, + "outputs": [], + "source": [ + "# New column\n", + "\n", + "NN_SciTLDR_train['Len_SciTLDR_train_Text'] = Len_SciTLDR_train_Text\n", + "NN_SciTLDR_train['Len_SciTLDR_train_Summary'] = Len_SciTLDR_train_Summary\n", + "\n", + "\n", + "NN_SciTLDR_test['Len_SciTLDR_test_Text'] = Len_SciTLDR_test_Text\n", + "NN_SciTLDR_test['Len_SciTLDR_test_Summary'] = Len_SciTLDR_test_Summary\n", + "\n", + "\n", + "NN_SciTLDR_dev['Len_SciTLDR_Val_Text'] = Len_SciTLDR_Val_Text\n", + "NN_SciTLDR_dev['Len_SciTLDR_Val_Summary'] = Len_SciTLDR_Val_Summary\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b0bf3209", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d359551c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 545, + "id": "6b18eb8e", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train['RangEe'] = NN_SciTLDR_train['Len_SciTLDR_train_Text'] - NN_SciTLDR_train['Len_SciTLDR_train_Summary']" + ] + }, + { + "cell_type": "code", + "execution_count": 546, + "id": "f78d3c07", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test['RangEe'] = NN_SciTLDR_test['Len_SciTLDR_test_Text'] - NN_SciTLDR_test['Len_SciTLDR_test_Summary']" + ] + }, + { + "cell_type": "code", + "execution_count": 547, + "id": "e3c1e384", + "metadata": {}, + "outputs": [], + "source": [ + 
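"# RangEe = number of text tokens minus number of summary tokens for the dev split\n", + 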
"NN_SciTLDR_dev['RangEe'] = NN_SciTLDR_dev['Len_SciTLDR_Val_Text'] - NN_SciTLDR_dev['Len_SciTLDR_Val_Summary']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de436422", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 548, + "id": "f703318a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_TitleT_Clean_TextT_Clean_SummaryLen_SciTLDR_train_TextLen_SciTLDR_train_SummaryRangEe
0Critical Points of Linear Neural Networks: Ana...Due to the success of deep learning to solving...We provide necessary and sufficient analytical...['due to the success of deep learning to solvi...['we provide necessary and sufficient analytic...critical points of linear neural networks: ana...[Due, to, the, success, of, deep, learning, to...[We, provide, necessary, and, sufficient, anal...17840138
1Biologically-Plausible Learning Algorithms Can...The backpropagation algorithm is often thought...Biologically plausible learning algorithms, pa...['the backpropagation (bp) algorithm is often ...['biologically plausible learning algorithms, ...biologically-plausible learning algorithms can...[The, backpropagation, algorithm, is, often, t...[Biologically, plausible, learning, algorithms...19512183
2Logic and the 2-Simplicial TransformerWe introduce the 2-simplicial Transformer, an ...We introduce the 2-simplicial Transformer and ...['we introduce the 2-simplicial transformer, a...['we introduce the 2-simplicial transformer an...logic and the 2-simplicial transformer[We, introduce, the, 2-simplicial, Transformer...[We, introduce, the, 2-simplicial, Transformer...572631
3Long-term Forecasting using Tensor-Train RNNsWe present Tensor-Train RNN, a novel family of...Accurate forecasting over very long time horiz...['we present tensor-train rnn (tt-rnn), a nove...['accurate forecasting over very long time hor...long-term forecasting using tensor-train rnns[We, present, Tensor-Train, RNN, ,, a, novel, ...[Accurate, forecasting, over, very, long, time...13810128
4Variational Message Passing with Structured In...Recent efforts on combining deep models with p...We propose a variational message-passing algor...['recent efforts on combining deep models with...['we propose a variational message-passing alg...variational message passing with structured in...[Recent, efforts, on, combining, deep, models,...[We, propose, a, variational, message-passing,...1161997
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "0 Critical Points of Linear Neural Networks: Ana... \n", + "1 Biologically-Plausible Learning Algorithms Can... \n", + "2 Logic and the 2-Simplicial Transformer \n", + "3 Long-term Forecasting using Tensor-Train RNNs \n", + "4 Variational Message Passing with Structured In... \n", + "\n", + " Clean_Text \\\n", + "0 Due to the success of deep learning to solving... \n", + "1 The backpropagation algorithm is often thought... \n", + "2 We introduce the 2-simplicial Transformer, an ... \n", + "3 We present Tensor-Train RNN, a novel family of... \n", + "4 Recent efforts on combining deep models with p... \n", + "\n", + " Clean_Summary \\\n", + "0 We provide necessary and sufficient analytical... \n", + "1 Biologically plausible learning algorithms, pa... \n", + "2 We introduce the 2-simplicial Transformer and ... \n", + "3 Accurate forecasting over very long time horiz... \n", + "4 We propose a variational message-passing algor... \n", + "\n", + " L_Clean_Text \\\n", + "0 ['due to the success of deep learning to solvi... \n", + "1 ['the backpropagation (bp) algorithm is often ... \n", + "2 ['we introduce the 2-simplicial transformer, a... \n", + "3 ['we present tensor-train rnn (tt-rnn), a nove... \n", + "4 ['recent efforts on combining deep models with... \n", + "\n", + " L_Clean_Summary \\\n", + "0 ['we provide necessary and sufficient analytic... \n", + "1 ['biologically plausible learning algorithms, ... \n", + "2 ['we introduce the 2-simplicial transformer an... \n", + "3 ['accurate forecasting over very long time hor... \n", + "4 ['we propose a variational message-passing alg... \n", + "\n", + " L_Clean_Title \\\n", + "0 critical points of linear neural networks: ana... \n", + "1 biologically-plausible learning algorithms can... \n", + "2 logic and the 2-simplicial transformer \n", + "3 long-term forecasting using tensor-train rnns \n", + "4 variational message passing with structured in... \n", + "\n", + " T_Clean_Text \\\n", + "0 [Due, to, the, success, of, deep, learning, to... \n", + "1 [The, backpropagation, algorithm, is, often, t... \n", + "2 [We, introduce, the, 2-simplicial, Transformer... \n", + "3 [We, present, Tensor-Train, RNN, ,, a, novel, ... \n", + "4 [Recent, efforts, on, combining, deep, models,... \n", + "\n", + " T_Clean_Summary Len_SciTLDR_train_Text \\\n", + "0 [We, provide, necessary, and, sufficient, anal... 178 \n", + "1 [Biologically, plausible, learning, algorithms... 195 \n", + "2 [We, introduce, the, 2-simplicial, Transformer... 57 \n", + "3 [Accurate, forecasting, over, very, long, time... 138 \n", + "4 [We, propose, a, variational, message-passing,... 
116 \n", + "\n", + " Len_SciTLDR_train_Summary RangEe \n", + "0 40 138 \n", + "1 12 183 \n", + "2 26 31 \n", + "3 10 128 \n", + "4 19 97 " + ] + }, + "execution_count": 548, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.head()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a9459b75", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3f5870b", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 549, + "id": "602cab4e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True 1970\n", + "Name: RangEe_TexT_SummarY, dtype: int64" + ] + }, + "execution_count": 549, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['RangEe_TexT_SummarY'] = (NN_SciTLDR_train['RangEe']>10)\n", + "NN_SciTLDR_train['RangEe_TexT_SummarY'].value_counts()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "256bcdb2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 550, + "id": "38490cf2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True 600\n", + "False 18\n", + "Name: RangEe_TexT_SummarY, dtype: int64" + ] + }, + "execution_count": 550, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['RangEe_TexT_SummarY'] = (NN_SciTLDR_test['RangEe']>10)\n", + "NN_SciTLDR_test['RangEe_TexT_SummarY'].value_counts()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7eb8f0ae", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 551, + "id": "bd093b2a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True 602\n", + "False 11\n", + "Name: RangEe_TexT_SummarY, dtype: int64" + ] + }, + "execution_count": 551, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['RangEe_TexT_SummarY'] = (NN_SciTLDR_dev['RangEe']>10)\n", + "NN_SciTLDR_dev['RangEe_TexT_SummarY'].value_counts()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "40fb9052", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 552, + "id": "66be2650", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_TitleT_Clean_TextT_Clean_SummaryLen_SciTLDR_train_TextLen_SciTLDR_train_SummaryRangEeRangEe_TexT_SummarY
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [Clean_Title, Clean_Text, Clean_Summary, L_Clean_Text, L_Clean_Summary, L_Clean_Title, T_Clean_Text, T_Clean_Summary, Len_SciTLDR_train_Text, Len_SciTLDR_train_Summary, RangEe, RangEe_TexT_SummarY]\n", + "Index: []" + ] + }, + "execution_count": 552, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train[~NN_SciTLDR_train['RangEe_TexT_SummarY']]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 553, + "id": "6d5d582f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_SummaryL_Clean_TextL_Clean_SummaryL_Clean_TitleT_Clean_TextT_Clean_SummaryLen_SciTLDR_test_TextLen_SciTLDR_test_SummaryRangEeRangEe_TexT_SummarY
34Cramer-Wold AutoEncoderAssessing distance betweeen the true and the s...Inspired by prior work on Sliced-Wasserstein A...['assessing distance betweeen the true and the...['inspired by prior work on sliced-wasserstein...cramer-wold autoencoder[Assessing, distance, betweeen, the, true, and...[Inspired, by, prior, work, on, Sliced-Wassers...8889-1False
80Distributed Prioritized Experience ReplayWe propose a distributed architecture for deep...A distributed architecture for deep reinforcem...['we propose a distributed architecture for de...['a distributed architecture for deep reinforc...distributed prioritized experience replay[We, propose, a, distributed, architecture, fo...[A, distributed, architecture, for, deep, rein...1211165False
113Sample-Efficient Deep Reinforcement Learning v...We propose Episodic Backward Update - a new al...We propose Episodic Backward Update, a novel d...['we propose episodic backward update - a new ...['we propose episodic backward update, a novel...sample-efficient deep reinforcement learning v...[We, propose, Episodic, Backward, Update, -, a...[We, propose, Episodic, Backward, Update, ,, a...101116-15False
128Deep clustering based on a mixture of autoenco...In this paper we propose a Deep Autoencoder Mi...We propose a deep clustering method where inst...['in this paper we propose a deep autoencoder ...['we propose a deep clustering method where in...deep clustering based on a mixture of autoenco...[In, this, paper, we, propose, a, Deep, Autoen...[We, propose, a, deep, clustering, method, whe...1151078False
133On the Ineffectiveness of Variance Reduced Opt...The application of stochastic variance reducti...The SVRG method fails on modern deep learning ...['the application of stochastic variance reduc...['the svrg method fails on modern deep learnin...on the ineffectiveness of variance reduced opt...[The, application, of, stochastic, variance, r...[The, SVRG, method, fails, on, modern, deep, l...5560-5False
178Neural Network Cost Landscapes as Quantum StatesQuantum computers promise significant advantag...We show that NN parameter and hyperparameter c...['quantum computers promise significant advant...['we show that nn parameter and hyperparameter...neural network cost landscapes as quantum states[Quantum, computers, promise, significant, adv...[We, show, that, NN, parameter, and, hyperpara...1121102False
234Holographic and other Point Set Distances for ...We introduce an analytic distance function for...Permutation-invariant loss function for point ...['we introduce an analytic distance function f...['permutation-invariant loss function for poin...holographic and other point set distances for ...[We, introduce, an, analytic, distance, functi...[Permutation-invariant, loss, function, for, p...6876-8False
258Visceral Machines: Risk-Aversion in Reinforce...As people learn to navigate the world, autono...We present a novel approach to reinforcement l...[' as people learn to navigate the world, auto...['we present a novel approach to reinforcement...visceral machines: risk-aversion in reinforce...[As, people, learn, to, navigate, the, world, ...[We, present, a, novel, approach, to, reinforc...11710710False
297Compact Neural Networks based on the Multiscal...The goal of this paper is to demonstrate a met...We replace the fully connected layers of a neu...['the goal of this paper is to demonstrate a m...['we replace the fully connected layers of a n...compact neural networks based on the multiscal...[The, goal, of, this, paper, is, to, demonstra...[We, replace, the, fully, connected, layers, o...1029210False
362Boosting Robustness Certification of Neural Ne...We present a novel approach for the certificat...We refine the over-approximation results from ...['we present a novel approach for the certific...['we refine the over-approximation results fro...boosting robustness certification of neural ne...[We, present, a, novel, approach, for, the, ce...[We, refine, the, over-approximation, results,...4479-35False
422Bias-Variance Decomposition for Boltzmann Mach...We achieve bias-variance decomposition for Bol...We achieve bias-variance decomposition for Bol...['we achieve bias-variance decomposition for b...['we achieve bias-variance decomposition for b...bias-variance decomposition for boltzmann mach...[We, achieve, bias-variance, decomposition, fo...[We, achieve, bias-variance, decomposition, fo...68653False
499Loss-aware Weight Quantization of Deep NetworksThe huge size of deep networks hinders their u...A loss-aware weight quantization algorithm tha...['the huge size of deep networks hinders their...['a loss-aware weight quantization algorithm t...loss-aware weight quantization of deep networks[The, huge, size, of, deep, networks, hinders,...[A, loss-aware, weight, quantization, algorith...7680-4False
531Online Hyper-Parameter OptimizationWe propose an efficient online hyperparameter ...An algorithm for optimizing regularization hyp...['we propose an efficient online hyperparamete...['an algorithm for optimizing regularization h...online hyper-parameter optimization[We, propose, an, efficient, online, hyperpara...[An, algorithm, for, optimizing, regularizatio...6273-11False
553Self-Organization adds application robustness ...While self-organizing principles have motivate...integration of self-organization and supervise...['while self-organizing principles have motiva...['integration of self-organization and supervi...self-organization adds application robustness ...[While, self-organizing, principles, have, mot...[integration, of, self-organization, and, supe...98953False
562Wasserstein proximal of GANsWe introduce a new method for training GANs by...We propose the Wasserstein proximal method for...['we introduce a new method for training gans ...['we propose the wasserstein proximal method f...wasserstein proximal of gans[We, introduce, a, new, method, for, training,...[We, propose, the, Wasserstein, proximal, meth...81810False
581Domain Adaptation for Deep Reinforcement Learn...Many deep reinforcement learning approaches us...An approach to learning a shared embedding spa...['many deep reinforcement learning approaches ...['an approach to learning a shared embedding s...domain adaptation for deep reinforcement learn...[Many, deep, reinforcement, learning, approach...[An, approach, to, learning, a, shared, embedd...7390-17False
589Quadrature-based features for kernel approxima...We consider the problem of improving kernel ap...Quadrature rules for kernel approximation.The ...['we consider the problem of improving kernel ...['quadrature rules for kernel approximation.',...quadrature-based features for kernel approxima...[We, consider, the, problem, of, improving, ke...[Quadrature, rules, for, kernel, approximation...81801False
612Neural Program Search: Solving Data Processing...We present a Neural Program Search, an algorit...Program synthesis from natural language descri...['we present a neural program search, an algor...['program synthesis from natural language desc...neural program search: solving data processing...[We, present, a, Neural, Program, Search, ,, a...[Program, synthesis, from, natural, language, ...8388-5False
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "34 Cramer-Wold AutoEncoder \n", + "80 Distributed Prioritized Experience Replay \n", + "113 Sample-Efficient Deep Reinforcement Learning v... \n", + "128 Deep clustering based on a mixture of autoenco... \n", + "133 On the Ineffectiveness of Variance Reduced Opt... \n", + "178 Neural Network Cost Landscapes as Quantum States \n", + "234 Holographic and other Point Set Distances for ... \n", + "258 Visceral Machines: Risk-Aversion in Reinforce... \n", + "297 Compact Neural Networks based on the Multiscal... \n", + "362 Boosting Robustness Certification of Neural Ne... \n", + "422 Bias-Variance Decomposition for Boltzmann Mach... \n", + "499 Loss-aware Weight Quantization of Deep Networks \n", + "531 Online Hyper-Parameter Optimization \n", + "553 Self-Organization adds application robustness ... \n", + "562 Wasserstein proximal of GANs \n", + "581 Domain Adaptation for Deep Reinforcement Learn... \n", + "589 Quadrature-based features for kernel approxima... \n", + "612 Neural Program Search: Solving Data Processing... \n", + "\n", + " Clean_Text \\\n", + "34 Assessing distance betweeen the true and the s... \n", + "80 We propose a distributed architecture for deep... \n", + "113 We propose Episodic Backward Update - a new al... \n", + "128 In this paper we propose a Deep Autoencoder Mi... \n", + "133 The application of stochastic variance reducti... \n", + "178 Quantum computers promise significant advantag... \n", + "234 We introduce an analytic distance function for... \n", + "258 As people learn to navigate the world, autono... \n", + "297 The goal of this paper is to demonstrate a met... \n", + "362 We present a novel approach for the certificat... \n", + "422 We achieve bias-variance decomposition for Bol... \n", + "499 The huge size of deep networks hinders their u... \n", + "531 We propose an efficient online hyperparameter ... \n", + "553 While self-organizing principles have motivate... \n", + "562 We introduce a new method for training GANs by... \n", + "581 Many deep reinforcement learning approaches us... \n", + "589 We consider the problem of improving kernel ap... \n", + "612 We present a Neural Program Search, an algorit... \n", + "\n", + " Clean_Summary \\\n", + "34 Inspired by prior work on Sliced-Wasserstein A... \n", + "80 A distributed architecture for deep reinforcem... \n", + "113 We propose Episodic Backward Update, a novel d... \n", + "128 We propose a deep clustering method where inst... \n", + "133 The SVRG method fails on modern deep learning ... \n", + "178 We show that NN parameter and hyperparameter c... \n", + "234 Permutation-invariant loss function for point ... \n", + "258 We present a novel approach to reinforcement l... \n", + "297 We replace the fully connected layers of a neu... \n", + "362 We refine the over-approximation results from ... \n", + "422 We achieve bias-variance decomposition for Bol... \n", + "499 A loss-aware weight quantization algorithm tha... \n", + "531 An algorithm for optimizing regularization hyp... \n", + "553 integration of self-organization and supervise... \n", + "562 We propose the Wasserstein proximal method for... \n", + "581 An approach to learning a shared embedding spa... \n", + "589 Quadrature rules for kernel approximation.The ... \n", + "612 Program synthesis from natural language descri... \n", + "\n", + " L_Clean_Text \\\n", + "34 ['assessing distance betweeen the true and the... \n", + "80 ['we propose a distributed architecture for de... 
\n", + "113 ['we propose episodic backward update - a new ... \n", + "128 ['in this paper we propose a deep autoencoder ... \n", + "133 ['the application of stochastic variance reduc... \n", + "178 ['quantum computers promise significant advant... \n", + "234 ['we introduce an analytic distance function f... \n", + "258 [' as people learn to navigate the world, auto... \n", + "297 ['the goal of this paper is to demonstrate a m... \n", + "362 ['we present a novel approach for the certific... \n", + "422 ['we achieve bias-variance decomposition for b... \n", + "499 ['the huge size of deep networks hinders their... \n", + "531 ['we propose an efficient online hyperparamete... \n", + "553 ['while self-organizing principles have motiva... \n", + "562 ['we introduce a new method for training gans ... \n", + "581 ['many deep reinforcement learning approaches ... \n", + "589 ['we consider the problem of improving kernel ... \n", + "612 ['we present a neural program search, an algor... \n", + "\n", + " L_Clean_Summary \\\n", + "34 ['inspired by prior work on sliced-wasserstein... \n", + "80 ['a distributed architecture for deep reinforc... \n", + "113 ['we propose episodic backward update, a novel... \n", + "128 ['we propose a deep clustering method where in... \n", + "133 ['the svrg method fails on modern deep learnin... \n", + "178 ['we show that nn parameter and hyperparameter... \n", + "234 ['permutation-invariant loss function for poin... \n", + "258 ['we present a novel approach to reinforcement... \n", + "297 ['we replace the fully connected layers of a n... \n", + "362 ['we refine the over-approximation results fro... \n", + "422 ['we achieve bias-variance decomposition for b... \n", + "499 ['a loss-aware weight quantization algorithm t... \n", + "531 ['an algorithm for optimizing regularization h... \n", + "553 ['integration of self-organization and supervi... \n", + "562 ['we propose the wasserstein proximal method f... \n", + "581 ['an approach to learning a shared embedding s... \n", + "589 ['quadrature rules for kernel approximation.',... \n", + "612 ['program synthesis from natural language desc... \n", + "\n", + " L_Clean_Title \\\n", + "34 cramer-wold autoencoder \n", + "80 distributed prioritized experience replay \n", + "113 sample-efficient deep reinforcement learning v... \n", + "128 deep clustering based on a mixture of autoenco... \n", + "133 on the ineffectiveness of variance reduced opt... \n", + "178 neural network cost landscapes as quantum states \n", + "234 holographic and other point set distances for ... \n", + "258 visceral machines: risk-aversion in reinforce... \n", + "297 compact neural networks based on the multiscal... \n", + "362 boosting robustness certification of neural ne... \n", + "422 bias-variance decomposition for boltzmann mach... \n", + "499 loss-aware weight quantization of deep networks \n", + "531 online hyper-parameter optimization \n", + "553 self-organization adds application robustness ... \n", + "562 wasserstein proximal of gans \n", + "581 domain adaptation for deep reinforcement learn... \n", + "589 quadrature-based features for kernel approxima... \n", + "612 neural program search: solving data processing... \n", + "\n", + " T_Clean_Text \\\n", + "34 [Assessing, distance, betweeen, the, true, and... \n", + "80 [We, propose, a, distributed, architecture, fo... \n", + "113 [We, propose, Episodic, Backward, Update, -, a... \n", + "128 [In, this, paper, we, propose, a, Deep, Autoen... \n", + "133 [The, application, of, stochastic, variance, r... 
\n", + "178 [Quantum, computers, promise, significant, adv... \n", + "234 [We, introduce, an, analytic, distance, functi... \n", + "258 [As, people, learn, to, navigate, the, world, ... \n", + "297 [The, goal, of, this, paper, is, to, demonstra... \n", + "362 [We, present, a, novel, approach, for, the, ce... \n", + "422 [We, achieve, bias-variance, decomposition, fo... \n", + "499 [The, huge, size, of, deep, networks, hinders,... \n", + "531 [We, propose, an, efficient, online, hyperpara... \n", + "553 [While, self-organizing, principles, have, mot... \n", + "562 [We, introduce, a, new, method, for, training,... \n", + "581 [Many, deep, reinforcement, learning, approach... \n", + "589 [We, consider, the, problem, of, improving, ke... \n", + "612 [We, present, a, Neural, Program, Search, ,, a... \n", + "\n", + " T_Clean_Summary Len_SciTLDR_test_Text \\\n", + "34 [Inspired, by, prior, work, on, Sliced-Wassers... 88 \n", + "80 [A, distributed, architecture, for, deep, rein... 121 \n", + "113 [We, propose, Episodic, Backward, Update, ,, a... 101 \n", + "128 [We, propose, a, deep, clustering, method, whe... 115 \n", + "133 [The, SVRG, method, fails, on, modern, deep, l... 55 \n", + "178 [We, show, that, NN, parameter, and, hyperpara... 112 \n", + "234 [Permutation-invariant, loss, function, for, p... 68 \n", + "258 [We, present, a, novel, approach, to, reinforc... 117 \n", + "297 [We, replace, the, fully, connected, layers, o... 102 \n", + "362 [We, refine, the, over-approximation, results,... 44 \n", + "422 [We, achieve, bias-variance, decomposition, fo... 68 \n", + "499 [A, loss-aware, weight, quantization, algorith... 76 \n", + "531 [An, algorithm, for, optimizing, regularizatio... 62 \n", + "553 [integration, of, self-organization, and, supe... 98 \n", + "562 [We, propose, the, Wasserstein, proximal, meth... 81 \n", + "581 [An, approach, to, learning, a, shared, embedd... 73 \n", + "589 [Quadrature, rules, for, kernel, approximation... 81 \n", + "612 [Program, synthesis, from, natural, language, ... 83 \n", + "\n", + " Len_SciTLDR_test_Summary RangEe RangEe_TexT_SummarY \n", + "34 89 -1 False \n", + "80 116 5 False \n", + "113 116 -15 False \n", + "128 107 8 False \n", + "133 60 -5 False \n", + "178 110 2 False \n", + "234 76 -8 False \n", + "258 107 10 False \n", + "297 92 10 False \n", + "362 79 -35 False \n", + "422 65 3 False \n", + "499 80 -4 False \n", + "531 73 -11 False \n", + "553 95 3 False \n", + "562 81 0 False \n", + "581 90 -17 False \n", + "589 80 1 False \n", + "612 88 -5 False " + ] + }, + "execution_count": 553, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test[~NN_SciTLDR_test['RangEe_TexT_SummarY']]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 554, + "id": "5ec93d0f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "61 Learning Graph Neural Networks with Noisy Labels \n", + "125 Smooth Kernels Improve Adversarial Robustness ... \n", + "221 Long Short-Term Memory as a Dynamically Comput... \n", + "297 Size-free generalization bounds for convolutio... \n", + "313 Fluctuation-dissipation relations for stochast... \n", + "335 The Singular Values of Convolutional Layers \n", + "349 CONTROLLING COVARIATE SHIFT USING EQUILIBRIUM ... \n", + "519 Robust Natural Language Representation Learnin... \n", + "532 Accelerating Convolutional Neural Networks usi... \n", + "585 JAUNE: Justified And Unified Neural language E... \n", + "596 Not-So-Random Features \n", + "\n", + " Clean_Text \\\n", + "61 We study the robustness to symmetric label noi... \n", + "125 Recent research has shown that CNNs are often ... \n", + "221 Long short-term memory networks were introduce... \n", + "297 We prove bounds on the generalization error of... \n", + "313 The notion of the stationary equilibrium ensem... \n", + "335 We characterize the singular values of the lin... \n", + "349 We introduce a new normalization technique tha... \n", + "519 In natural language inference, the semantics o... \n", + "532 We present the iterative two-pass decompositio... \n", + "585 We review the limitations of BLEU and ROUGE --... \n", + "596 We propose a principled method for kernel lear... \n", + "\n", + " Clean_Summary \\\n", + "61 We apply loss correction to graph neural netwo... \n", + "125 We introduce a smoothness regularization for c... \n", + "221 Gates do all the heavy lifting in LSTMs by com... \n", + "297 We prove generalization bounds for convolution... \n", + "313 We prove fluctuation-dissipation relations for... \n", + "335 We characterize the singular values of the lin... \n", + "349 An alternative normalization technique to batc... \n", + "519 We use neural networks to project superficial ... \n", + "532 We present the iterative two-pass CP decomposi... \n", + "585 Introduces JAUNE: a methodology to replace BLE... \n", + "596 A simple and practical algorithm for learning ... \n", + "\n", + " L_Clean_Text \\\n", + "61 ['we study the robustness to symmetric label n... \n", + "125 ['recent research has shown that cnns are ofte... \n", + "221 ['long short-term memory networks (lstms) were... \n", + "297 ['we prove bounds on the generalization error ... \n", + "313 ['the notion of the stationary equilibrium ens... \n", + "335 ['we characterize the singular values of the l... \n", + "349 ['we introduce a new normalization technique t... \n", + "519 ['in natural language inference, the semantics... \n", + "532 ['we present the iterative two-pass decomposit... \n", + "585 ['we review the limitations of bleu and rouge ... \n", + "596 ['we propose a principled method for kernel le... \n", + "\n", + " L_Clean_Summary \\\n", + "61 ['we apply loss correction to graph neural net... \n", + "125 ['we introduce a smoothness regularization for... \n", + "221 ['gates do all the heavy lifting in lstms by c... \n", + "297 ['we prove generalization bounds for convoluti... \n", + "313 ['we prove fluctuation-dissipation relations f... \n", + "335 ['we characterize the singular values of the l... \n", + "349 ['an alternative normalization technique to ba... \n", + "519 ['we use neural networks to project superficia... \n", + "532 ['we present the iterative two-pass cp decompo... \n", + "585 ['introduces jaune: a methodology to replace b... \n", + "596 ['a simple and practical algorithm for learnin... 
\n", + "\n", + " L_Clean_Title \\\n", + "61 learning graph neural networks with noisy labels \n", + "125 smooth kernels improve adversarial robustness ... \n", + "221 long short-term memory as a dynamically comput... \n", + "297 size-free generalization bounds for convolutio... \n", + "313 fluctuation-dissipation relations for stochast... \n", + "335 the singular values of convolutional layers \n", + "349 controlling covariate shift using equilibrium ... \n", + "519 robust natural language representation learnin... \n", + "532 accelerating convolutional neural networks usi... \n", + "585 jaune: justified and unified neural language e... \n", + "596 not-so-random features \n", + "\n", + " T_Clean_Text \\\n", + "61 [We, study, the, robustness, to, symmetric, la... \n", + "125 [Recent, research, has, shown, that, CNNs, are... \n", + "221 [Long, short-term, memory, networks, were, int... \n", + "297 [We, prove, bounds, on, the, generalization, e... \n", + "313 [The, notion, of, the, stationary, equilibrium... \n", + "335 [We, characterize, the, singular, values, of, ... \n", + "349 [We, introduce, a, new, normalization, techniq... \n", + "519 [In, natural, language, inference, ,, the, sem... \n", + "532 [We, present, the, iterative, two-pass, decomp... \n", + "585 [We, review, the, limitations, of, BLEU, and, ... \n", + "596 [We, propose, a, principled, method, for, kern... \n", + "\n", + " T_Clean_Summary Len_SciTLDR_Val_Text \\\n", + "61 [We, apply, loss, correction, to, graph, neura... 48 \n", + "125 [We, introduce, a, smoothness, regularization,... 89 \n", + "221 [Gates, do, all, the, heavy, lifting, in, LSTM... 134 \n", + "297 [We, prove, generalization, bounds, for, convo... 81 \n", + "313 [We, prove, fluctuation-dissipation, relations... 99 \n", + "335 [We, characterize, the, singular, values, of, ... 72 \n", + "349 [An, alternative, normalization, technique, to... 59 \n", + "519 [We, use, neural, networks, to, project, super... 88 \n", + "532 [We, present, the, iterative, two-pass, CP, de... 75 \n", + "585 [Introduces, JAUNE, :, a, methodology, to, rep... 56 \n", + "596 [A, simple, and, practical, algorithm, for, le... 
70 \n", + "\n", + " Len_SciTLDR_Val_Summary RangEe RangEe_TexT_SummarY \n", + "61 60 -12 False \n", + "125 86 3 False \n", + "221 150 -16 False \n", + "297 81 0 False \n", + "313 93 6 False \n", + "335 76 -4 False \n", + "349 49 10 False \n", + "519 79 9 False \n", + "532 73 2 False \n", + "585 50 6 False \n", + "596 91 -21 False " + ] + }, + "execution_count": 554, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev[~NN_SciTLDR_dev['RangEe_TexT_SummarY']]\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "32e5a970", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 555, + "id": "6741e1fb", + "metadata": {}, + "outputs": [], + "source": [ + "A = NN_SciTLDR_test[~NN_SciTLDR_test['RangEe_TexT_SummarY']].index" + ] + }, + { + "cell_type": "code", + "execution_count": 556, + "id": "863d9a7a", + "metadata": {}, + "outputs": [], + "source": [ + "B = NN_SciTLDR_dev[~NN_SciTLDR_dev['RangEe_TexT_SummarY']].index" + ] + }, + { + "cell_type": "code", + "execution_count": 557, + "id": "89c5908a", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test = NN_SciTLDR_test.drop(A)" + ] + }, + { + "cell_type": "code", + "execution_count": 558, + "id": "4065053a", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_dev = NN_SciTLDR_dev.drop(B)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "292b9733", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "86a421a3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 559, + "id": "baf29438", + "metadata": {}, + "outputs": [], + "source": [ + "# New column\n", + "\n", + "Len_SciTLDR_train_Textt=[]\n", + "Len_SciTLDR_train_Summaryy=[]\n", + "\n", + "Len_SciTLDR_train_Textt = NN_SciTLDR_train['Len_SciTLDR_train_Text'] \n", + "Len_SciTLDR_train_Summaryy = NN_SciTLDR_train['Len_SciTLDR_train_Summary']\n", + "\n", + "\n", + "Len_SciTLDR_test_Textt = []\n", + "Len_SciTLDR_test_Summaryy = []\n", + "\n", + "Len_SciTLDR_test_Textt = NN_SciTLDR_test['Len_SciTLDR_test_Text'] \n", + "Len_SciTLDR_test_Summaryy = NN_SciTLDR_test['Len_SciTLDR_test_Summary'] \n", + "\n", + "Len_SciTLDR_Val_Textt=[]\n", + "Len_SciTLDR_Val_Summaryy=[]\n", + "\n", + "Len_SciTLDR_Val_Textt = NN_SciTLDR_dev['Len_SciTLDR_Val_Text'] \n", + "Len_SciTLDR_Val_Summaryy = NN_SciTLDR_dev['Len_SciTLDR_Val_Summary'] \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a60277f9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "2292ac57", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "markdown", + "id": "d919c011", + "metadata": {}, + "source": [ + "# BuilD HisT \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "695ceaf2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 560, + "id": "7013d9b4", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA90AAAHqCAYAAAAZLi26AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABhgElEQVR4nO3dfXyP9f////tObMO8NsPOMnOamZMIzSudiDFaSta79BZT4p1GoaSVnMbk04nU0Ckq8k5FJTmbk75lxGo5i5Cad2xT2oYYtuP3h9+OetmcTK9jr21u18vluFwcz+fzdRyP53Oz5/F4HWduhmEYAgAAAAAATufu6gAAAAAAAKisSLoBAAAAALAISTcAAAAAABYh6QYAAAAAwCIk3QAAAAAAWISkGwAAAAAAi5B0AwAAAABgEZJuAAAAAAAsQtINAAAAAIBFSLpxxenUqZM6derk6jAqrHXr1snNzU3r1q1zdSi4RD///LPc3Nz0/PPPuzoUAJUA8+g/wzyKsjR+/Hi5ubnpt99+c3UoVzSSblQI27Zt01133aXw8HD5+PjoqquuUteuXfXKK6/8o+126tRJbm5uF13Gjx8vSapfv75uu+22C25zwIABDp/19fVVw4YNddddd+mjjz5SYWHhReOoWrWqWrVqpenTp5fY/mJmzpypuXPnlvpzrjR37txL+lnUr1/faftctmyZ+bO9FJ06dVKLFi2ctn9nK21/AFw5mEdLpyLOo0UKCwv1zjvvKCoqSgEBAapRo4auvvpq9e/fXxs3bnR1eBVSRfjyesqUKVqyZImrw8B5eLo6AOBiNmzYoFtuuUX16tXToEGDFBwcrAMHDmjjxo16+eWXNWzYsFJtb+XKlea/n376aT344IPm+ubNmzVjxgw99dRTatasmVneqlWrUu3D29tbb775piTpxIkT+uWXX/TZZ5/prrvuUqdOnfTJJ5/IZrM5fKZu3bpKSkqSJP32229asGCBRowYocOHD2vy5Mml2v/MmTNVu3ZtDRgwoFSfuxQ33XSTTpw4IS8vL6dv991333Uoe/DBB3Xddddp8ODBZpmvr6/T9rls2TIlJydXmkS1svUHgHMwj14Z82iRRx55RMnJybrjjjvUt29feXp6avfu3friiy/UsGFDdejQwZL9wrWmTJmiu+66S7169XJ1KCgBSTfKvcmTJ8vPz0+bN2+Wv7+/Q112dnapt/f3Sa5r164OdT4+PpoxY4a6du36jy6d8/T01H333edQ9uyzz2rq1KlKTEzUoEGD9N///teh3s/Pz+EzDz30kCIiIvTKK69o4sSJ8vDwuOx4LuT48eOqXr36Jbd3d3eXj4+P0+No2LChGjZs6FD20EMPqWHDhsXGEgBw6ZhHr4x5VJKysrI0c+ZMDRo0SK+//rpD3fTp03X48GFL9luenTx5Ul5eXnJ35wJfuA6/fSj39u3bp+bNmxc7UJCkwMDAYmXvvfeerrvuOlWrVk01a9bUTTfd5PCtvCvvRXvyySfVrVs3LVq0SD/++OMF2/r4+Kh9+/Y6evRoqQ6K6tevrx07dmj9+vXmZXZF/S26hHv9+vV6+OGHFRgYqLp160qSfvnlFz388MNq2rSpqlatqlq1aulf//qXfv75Z4ftl3QvWtFl1zt37tQtt9yiatWq6aqrrtK0adMuOe5L9euvv+qBBx5QUFCQvL291bx5c7399ttm/YkTJxQREaGIiAidOHHCLD9y5IhCQkJ0/fXXq6CgQAMGDFBycrIkOVyS6AxffPGFbrzxRlWvXl01atRQbGysduzY4dBmwIAB8vX11a+//qpevXrJ19dXderU0eOPP66CggKHtr///rv69esnm80mf39/xcfH6/vvv5ebm5t5+eOl9uf1119Xo0aN5O3trfbt22vz5s1O6TOA8ot59MqZR/fv3y/DMNSxY8didW5ubg4/76J7fc9V1Me/x110W8C6devUrl07Va1aVS1btjT78PHHH6tly5by8fFR27Zt9d133zlss2jOy8jI0G233SZfX19dddVV5ry1bds2de7cWdWrV1d4eLgWLFjg8PkjR47o8ccfV8uWLeXr6yubzaYePXro+++/d2hXNLYLFy7UmDFjdNVVV6latWpKT0+Xm5ubXnrppWL93bBhg9zc3PT+++9feHAvQX5+vsaNG6fGjRvL29tbYWFheuKJJ5Sfn+/Qzs3NTUOHDtWSJUvUokUL83hm+fLlxbZZNOY+Pj5q1KiRXnvttWI/Ozc3Nx0/flzz5s0zf2fPvUojJydHAwYMkL+/v/z8/HT//ffrzz///Md9xqXhTDfKvfDwcKWmpmr79u0XvZ92woQJGj9+vK6//npNnDhRXl5e2rRpk9asWaNu3bqVUcQX1q9fP61cuVKrVq3S1VdffcG2RfcQlXSgdD7Tp0/XsGHD5Ovrq6efflqSFBQU5NDm4YcfVp06dTR27FgdP35c0tlLAjds2KA+ffqobt26+vnnnzVr1ix16tRJO3fuVLVq1S643z/++EPdu3dX7969dffdd+vDDz/U6NGj1bJlS/Xo0eOS47+QrKwsdejQwZys6tSpoy+++EIDBw5UXl6ehg8frqpVq2revHnq2LGjnn76ab344ouSpISEBOXm5mru3Lny8PDQf/7zHx08eFCrVq0qdln7P/Huu+8qPj5eMTExeu655/Tnn39q1qxZuuGGG/Tdd9853JNeUFCgmJgYRUVF6fnnn9fq1av1wgsvqFGjRhoyZIiks/fm9ezZU998842GDBmiiIgIffLJJ4qPj3fY76X0Z8GCBTp69Kj+85//yM3NTdOmTVPv3r31008/qUqVKk4bAwDlC/PolTOPhoeHS5IWLVqkf/3rXxfdZ2ns3btX//73v/Wf//xH9913n55//nn17NlTs2fP1lNPPaWHH35YkpSUlKS7775bu3fvdji7XFBQoB49euimm27StGnTNH/+fA0dOlTVq1fX008/rb59+6p3796aPXu2+vfvL7vdrgYNGkiSfvrpJy1ZskT/+te/1KBBA2VlZem1117TzTffrJ07dyo0NNQh1kmTJsnLy0uPP/648vPzFRERoY4dO2r+/PkaMWKEQ9v58+erRo0auuOOO/7R+BQWFur222/XV199pcGDB6tZs2batm2bXnrpJf3444/F7rf+6quv9PHHH+vhhx9WjRo1NGPGDMXFxSkjI0O1atWSJH333Xfq3r27QkJCNGHCBBUUFGjixImqU6eOw7befffdYrfkNWrUyKHN3XffrQYNGigpKUnffvut3nzzTQUGBuq55577R/3GJTKAcm7lypWGh4eH4eHhYdjtduOJJ54wVqxYYZw6dcqh3Z49ewx3d3fjzjvvNAoKChzqCgsLzX/ffPPNxs0331zivhYtWmRIMtauXVtifXh4uBEbG3vBeOPj443q1auft/67774zJBkjRoxwiCkiIsI4fPiwcfjwYWPXrl3GqFGjDEkX3V9JmjdvXmIf58yZY0gybrjhBuPMmTMOdX/++Wex9qmpqYYk45133jHL1q5dW2yMbr755mLt8v
PzjeDgYCMuLq7U8RepXr26ER8fb64PHDjQCAkJMX777TeHdn369DH8/Pwc+pCYmGi4u7sbX375pflznT59usPnEhISjNL8Gbz55puN5s2bn7f+6NGjhr+/vzFo0CCH8szMTMPPz8+hPD4+3pBkTJw40aFtmzZtjLZt25rrH330UbHYCwoKjM6dOxuSjDlz5ly0P/v37zckGbVq1TKOHDliln/yySeGJOOzzz67eOcBVFjMo1fWPNq/f39DklGzZk3jzjvvNJ5//nnjhx9+KNZu3LhxJc4ZRX3cv3+/WRYeHm5IMjZs2GCWrVixwpBkVK1a1fjll1/M8tdee61Y/4rmvClTpphlf/zxh1G1alXDzc3NWLhwoVm+a9cuQ5Ixbtw4s+zkyZPFfif3799veHt7O8yjRWPbsGHDYj+Porj+PhanTp0yateu7XCsUZKiefT//u//ztvm3XffNdzd3Y3/9//+n0P57NmzDUnG119/bZZJMry8vIy9e/eaZd9//70hyXjllVfMsp49exrVqlUzfv31V7Nsz549hqenZ7Gf3bnHTEWKfs4PPPCAQ/mdd95p1KpV64L9hvNweTnKva5duyo1NVW33367vv/+e02bNk0xMTG66qqr9Omnn5rtlixZosLCQo0dO7bYfTvOumzYGYoeBHb06FGH8l27dqlOnTqqU6eOIiIi9H//93+6/fbbLXl66qBBg4rd21a1alXz36dPn9bvv/+uxo0by9/fX99+++1Ft+nr6+twL52Xl5euu+46/fTTT06J2TAMffTRR+rZs6cMw9Bvv/1mLjExMcrNzXWIc/z48WrevLni4+P18MMP6+abb9YjjzzilFjOZ9WqVcrJydG9997rEJ+Hh4eioqK0du3aYp956KGHHNZvvPFGhzFbvny5qlSpokGDBpll7u7uSkhIKHV899xzj2rWrOmwL0lO+xkBKJ+YR+c6PYbyPI/OmTNHr776qho0aKDFixfr8ccfV7NmzdSlSxf9+uuvpdrW30VGRsput5vrUVFRkqTOnTurXr16xcpLivvvD93z9/dX06ZNVb16dd19991medOmTeXv7+/weW9vb/N3sqCgQL///rt8fX3VtGnTEsc2Pj7e4echnT3T6+Pjo/nz55tlK1as0G+//eaUZ8csWrRIzZo1U0REhMMxQOfOnSWp2DFAdHS0w9noVq1ayWazmf0uKCjQ6tWr1atXL4cz+Y0bN76sKwhLOt74/ffflZeXV+ptofRIulEhtG/fXh9//LH++OMPffPNN0pMTNTRo0d11113aefOnZLO3rPm7u6uyMhIF0d7YceOHZMk1ahRw6G8fv36WrVqlVasWKGZM2fqqquu0uHDhy152ErR5Vp/d+LECY0dO1ZhYWHy9vZW7dq1VadOHeXk5Cg3N/ei26xbt26xg7KaNWvqjz/+cErMhw8fVk5Ojl5//XXzoKpouf/++yU5PhDIy8tLb7/9tvbv36+jR49qzpw5lh807tmzR9LZA5BzY1y5cmWxewp9fHyKXSJ27pj98ssvCgkJKXaJYOPGjUsd398Pior2JclpPyMA5RfzqHOV53m06IvZtLQ0/fbbb/rkk0/Uo0cPrVmzRn369CnVtv7u3DnEz89PkhQWFlZi+blxlzTn+fn5ldhvPz8/h88XFhbqpZdeUpMmTRzGduvWrSWObUk/H39/f/Xs2dPhfvH58+frqquuMhPjf2LPnj3asWNHsfm/6BaIc48Bzh1PyfHnnZ2drRMnTpQ433MMUPFwTzcqFC8vL7Vv317t27fX1Vdfrfvvv1+LFi3SuHHjXB3aJdu+fbuk4n8wq1evrujoaHO9Y8eOuvbaa/XUU09pxowZTo3h3G9/JWnYsGGaM2eOhg8fLrvdLj8/P7m5ualPnz6X9I7T8z0V1jCMfxyvJDOG++67r9j9zEXOfSXNihUrJJ19cumePXtKnISdqSjGd999V8HBwcXqPT0d/+Ra9STd87H6ZwSg/GMedY6KMo/WqlVLt99+u26//XZ16tRJ69ev1y+//KLw8PDzfhF97sM8Lxbfpcb9Tz4/ZcoUPfPMM3rggQc0adIkBQQEyN3dXcOHDy9xbEv6+UhS//79tWjRIm3YsEEtW7bUp59+qocfftgpTzYvLCxUy5YtzWfJnOvcLyfKek7mGMC1SLpRYbVr106SdOjQIUlnHxhRWFionTt3qnXr1i6M7MLeffddubm5FXvNyrlatWql++67T6+99poef/zxEr8RPZ/LOaP74YcfKj4+Xi+88IJZdvLkSeXk5JR6W1aoU6eOatSooYKCAoeDqvPZunWrJk6cqPvvv1/p6el68MEHtW3bNvMbeMn5l0sWXSYWGBh4STFeivDwcK1du1Z//vmnw9nuvXv3Fmtbni7/BFD+MY+eX2WcR9u1a6f169fr0KFDCg8PN8905uTkODxo7pdffnFRhOf34Ycf6pZbbtFbb73lUJ6Tk6PatWtf8na6d++uOnXqaP78+YqKitKff/6pfv36OSXGRo0a6fvvv1eXLl2cMh8HBgbKx8enxPmeY4CKh8vLUe6tXbu2xG/hli1bJunsvT+S1KtXL7m7u2vixInFvvUsL9/iTZ06VStXrtQ999yjJk2aXLT9E088odOnT5/3W9PzqV69eqkneQ8Pj2Lj9Morr5z3G++y5uHhobi4OH300UfmWY6/+/u7R0+fPq0BAwYoNDRUL7/8subOnausrKxiTywteq+qsw6IYmJiZLPZNGXKFJ0+ffqCMZZmm6dPn9Ybb7xhlhUWFpqvWfk7Z/cHQOXAPHrlzKOZmZnm7QJ/d+rUKaWkpMjd3d28QqDoi+Ivv/zSbFf02qnypqSxXbRoUanvUff09NS9996rDz74QHPnzlXLli2LXSV3ue6++279+uuvDvN1kRMnTphPub9UHh4eio6O1pIlS3Tw4EGzfO/evfriiy+Ktb+c31mUHc50o9wbNmyY/vzzT915552KiIjQqVOntGHDBv33v/9V/fr1zft5GzdurKefflqTJk3SjTfeqN69e8vb21ubN29WaGiokpKSnBLP3r179eyzzxYrb9OmjWJjYyVJZ86c0XvvvSfp7Lfcv/zyiz799FNt3bpVt9xyi15//fVL2ldkZKRuvfVWvfnmm3rmmWfMV0hcTNu2bTVr1iw9++yzaty4sQIDAy96v9Jtt92md999V35+foqMjFRqaqpWr159yfssC1OnTtXatWsVFRWlQYMGKTIyUkeOHNG3336r1atX68iRI5KkZ599Vunp6UpJSVGNGjXUqlUrjR07VmPGjNFdd92lW2+9VdLZcZKkRx55RDExMfLw8Ljo/W6HDx8u8effoEED9e3bV7NmzVK/fv107bXXqk+fPqpTp44yMjL0+eefq2PHjnr11VdL1edevXrpuuuu02OPPaa9e/cqIiJCn376qdnXv3+zfTn9AVD5MY9eOfPo//73P1133XXq3LmzunTpouDgYGVnZ+v999/X999/r+HDh5tnhrt166Z69epp4MCBGjVqlDw8PPT222+b81Z5ctttt5lXr
11//fXatm2b5s+fr4YNG5Z6W/3799eMGTO0du3aUr8uKyUlRSdPnixW3qtXL/Xr108ffPCBHnroIa1du1YdO3ZUQUGBdu3apQ8++EArVqwwry65VOPHj9fKlSvVsWNHDRkyRAUFBXr11VfVokULpaenO7Rt27atVq9erRdffFGhoaFq0KCB+VA7lANl/8B0oHS++OIL44EHHjAiIiIMX19fw8vLy2jcuLExbNgwIysrq1j7t99+22jTpo3h7e1t1KxZ07j55puNVatWmfX/9FUnkkpcBg4caBjGX6/FKFqqVatm1K9f34iLizM+/PDDYq+8KIrpfK+iWrduXbFXZ1xMZmamERsba9SoUcOQZPa36DUgmzdvLvaZP/74w7j//vuN2rVrG76+vkZMTIyxa9cuIzw83OEVFOd71UlJ8cfHxxvh4eGXHPe5Snr9RVZWlpGQkGCEhYUZVapUMYKDg40uXboYr7/+umEYhpGWlmZ4enoaw4YNc/jcmTNnjPbt2xuhoaHGH3/8YZYNGzbMqFOnjuHm5nbR14cVvdKlpKVLly5mu7Vr1xoxMTGGn5+f4ePjYzRq1MgYMGCAsWXLFoexKemVOCW9wuXw4cPGv//9b6NGjRqGn5+fMWDAAOPrr782JDm8ZuV8/bnQq05K+7sFoOJhHr1y5tG8vDzj5ZdfNmJiYoy6desaVapUMWrUqGHY7XbjjTfecHj1m2GcnTOjoqIMLy8vo169esaLL7543leGlfTqNUlGQkKCQ1lJc8755rzz9fvc/Z08edJ47LHHjJCQEKNq1apGx44djdTU1GK/i0Vju2jRoguOU/PmzQ13d3fjf//73wXbndun8y3vvvuuYRhnX0H23HPPGc2bNzf//7Rt29aYMGGCkZuba26vpHEr6ve5xz0pKSlGmzZtDC8vL6NRo0bGm2++aTz22GOGj4+PQ7tdu3YZN910k1G1alVDkrmdouOKw4cPO7Qv6ecM67gZRjm5XggAcMmWLFmiO++8U1999ZU6duzo6nAAAKgw2rRpo4CAAKWkpLg6lMvSq1cv7dixw3xrCso/7ukGgHLuxIkTDusFBQV65ZVXZLPZdO2117ooKgAAKp4tW7YoPT1d/fv3d3Uol+TcY4A9e/Zo2bJl6tSpk2sCwmXhnm6gAjl8+PAFH8ji5eWlgICAMozo0lXk2F1t2LBhOnHihOx2u/Lz8/Xxxx9rw4YNmjJlynlfiwIAKK4iz0UVOfbyYPv27UpLS9MLL7ygkJAQ3XPPPa4O6ZI0bNhQAwYMUMOGDfXLL79o1qxZ8vLy0hNPPOHq0FAKJN1ABdK+ffsLvsrj5ptv1rp168ouoFKoyLG7WufOnfXCCy9o6dKlOnnypBo3bqxXXnlFQ4cOdXVoAFChVOS5qCLHXh58+OGHmjhxopo2bar3339fPj4+rg7pknTv3l3vv/++MjMz5e3tLbvdrilTplzS0/tRfnBPN1CBfP3118UuM/q7mjVrmk+wLm8qcuwAgMqhIs9FFTl24EpH0g0AAAAAgEV4kBoAAAAAABbhnm5JhYWFOnjwoGrUqCE3NzdXhwMAgMkwDB09elShoaFyd7/yvitnjgYAlFeXOkeTdEs6ePCgwsLCXB0GAADndeDAAdWtW9fVYZQ55mgAQHl3sTmapFtSjRo1JJ0dLJvN5uJoAAD4S15ensLCwsy56krDHA0AKK8udY4m6ZbMy9VsNhsTOgCgXLpSL61mjgYAlHcXm6OvvJvDAAAAAAAoIyTdAAAAAABYhKQbAAAAAACLkHQDAAAAAGARkm4AAFCiX3/9Vffdd59q1aqlqlWrqmXLltqyZYtZbxiGxo4dq5CQEFWtWlXR0dHas2ePwzaOHDmivn37ymazyd/fXwMHDtSxY8fKuisAALgMSTcAACjmjz/+UMeOHVWlShV98cUX2rlzp1544QXVrFnTbDNt2jTNmDFDs2fP1qZNm1S9enXFxMTo5MmTZpu+fftqx44dWrVqlZYuXaovv/xSgwcPdkWXAABwiXKTdE+dOlVubm4aPny4WXby5EklJCSoVq1a8vX1VVxcnLKyshw+l5GRodjYWFWrVk2BgYEaNWqUzpw5U8bRAwBQuTz33HMKCwvTnDlzdN1116lBgwbq1q2bGjVqJOnsWe7p06drzJgxuuOOO9SqVSu98847OnjwoJYsWSJJ+uGHH7R8+XK9+eabioqK0g033KBXXnlFCxcu1MGDB13YOwAAyk65SLo3b96s1157Ta1atXIoHzFihD777DMtWrRI69ev18GDB9W7d2+zvqCgQLGxsTp16pQ2bNigefPmae7cuRo7dmxZdwEAgErl008/Vbt27fSvf/1LgYGBatOmjd544w2zfv/+/crMzFR0dLRZ5ufnp6ioKKWmpkqSUlNT5e/vr3bt2pltoqOj5e7urk2bNpW43/z8fOXl5TksAABUZC5Puo8dO6a+ffvqjTfecLhkLTc3V2+99ZZefPFFde7cWW3bttWcOXO0YcMGbdy4UZK0cuVK7dy5U++9955at26tHj16aNKkSUpOTtapU6dc1SUAACq8n376SbNmzVKTJk20YsUKDRkyRI888ojmzZsnScrMzJQkBQUFOXwuKCjIrMvMzFRgYKBDvaenpwICAsw250pKSpKfn5+5hIWFObtrAACUKZcn3QkJCYqNjXX4plyS0tLSdPr0aYfyiIgI1atXz+Eb9JYtWzpM+DExMcrLy9OOHTvKpgMAAFRChYWFuvbaazVlyhS1adNGgwcP1qBBgzR79mxL95uYmKjc3FxzOXDggKX7AwDAap6u3PnChQv17bffavPmzcXqMjMz5eXlJX9/f4fyc79BL+kb9qK688nPz1d+fr65zqVrAAA4CgkJUWRkpENZs2bN9NFHH0mSgoODJUlZWVkKCQkx22RlZal169Zmm+zsbIdtnDlzRkeOHDE/fy5vb295e3s7qxsAALicy850HzhwQI8++qjmz58vHx+fMt03l64BAHBhHTt21O7dux3KfvzxR4WHh0uSGjRooODgYKWkpJj1eXl52rRpk+x2uyTJbrcrJydHaWlpZps1a9aosLBQUVFRZdALAABcz2VJd1pamrKzs3XttdfK09NTnp6eWr9+vWbMmCFPT08FBQXp1KlTysnJcfhcVlaW+e14cHBwsaeZF62f7xt0iUvXAAC4mBEjRmjjxo2aMmWK9u7dqwULFuj1119XQkKCJJlvHHn22Wf16aefatu2berfv79CQ0PVq1cvSWfPjHfv3l2DBg3SN998o6+//lpDhw5Vnz59FBoa6sLeAQBQdlx2eXmXLl20bds2h7L7779fERERGj16tMLCwlSlShWlpKQoLi5OkrR7925lZGQ4fIM+efJkZWdnmw9qWbVqlWw2W7FL4v6OS9cAALiw9u3ba/HixUpMTNTEiRPVoEEDTZ8+XX379jXbPPHEEzp+/LgGDx6snJwc3XDDDVq+fLnDFWzz58/X0KFD1aVLF7m7uysuLk4zZsxwRZcAAHAJN8MwDFcHUaRTp05q3bq1pk+fLkkaMmSI
li1bprlz58pms2nYsGGSpA0bNkg6+8qw1q1bKzQ0VNOmTVNmZqb69eunBx98UFOmTLnk/ebl5cnPz0+5ubmy2WxO7xcAAJfrSp+jrvT+AwDKr0udo1z6ILWLeemll8xvxfPz8xUTE6OZM2ea9R4eHlq6dKmGDBkiu92u6tWrKz4+XhMnTnRh1AAAAAAAnFWuznS7Ct+iAwDKqyt9jrrS+w8AKL8qxZlu4HLVf/Jzp23r56mxTtsWAADnw9wFAJWTy55eDgAAAABAZUfSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiLk26Z82apVatWslms8lms8lut+uLL74w6zt16iQ3NzeH5aGHHnLYRkZGhmJjY1WtWjUFBgZq1KhROnPmTFl3BQAAAACAYjxdufO6detq6tSpatKkiQzD0Lx583THHXfou+++U/PmzSVJgwYN0sSJE83PVKtWzfx3QUGBYmNjFRwcrA0bNujQoUPq37+/qlSpoilTppR5fwAAAAAA+DuXJt09e/Z0WJ88ebJmzZqljRs3mkl3tWrVFBwcXOLnV65cqZ07d2r16tUKCgpS69atNWnSJI0ePVrjx4+Xl5eX5X0AAAAAAOB8ys093QUFBVq4cKGOHz8uu91uls+fP1+1a9dWixYtlJiYqD///NOsS01NVcuWLRUUFGSWxcTEKC8vTzt27CjT+AEAAAAAOJdLz3RL0rZt22S323Xy5En5+vpq8eLFioyMlCT9+9//Vnh4uEJDQ7V161aNHj1au3fv1scffyxJyszMdEi4JZnrmZmZ591nfn6+8vPzzfW8vDxndwsAAAAAANcn3U2bNlV6erpyc3P14YcfKj4+XuvXr1dkZKQGDx5stmvZsqVCQkLUpUsX7du3T40aNbrsfSYlJWnChAnOCB8AAAAAgPNy+eXlXl5eaty4sdq2baukpCRdc801evnll0tsGxUVJUnau3evJCk4OFhZWVkObYrWz3cfuCQlJiYqNzfXXA4cOOCMrgAAAAAA4MDlZ7rPVVhY6HDp99+lp6dLkkJCQiRJdrtdkydPVnZ2tgIDAyVJq1atks1mMy9RL4m3t7e8vb2dGzj+kfpPfu7qEAAAAADA6VyadCcmJqpHjx6qV6+ejh49qgULFmjdunVasWKF9u3bpwULFujWW29VrVq1tHXrVo0YMUI33XSTWrVqJUnq1q2bIiMj1a9fP02bNk2ZmZkaM2aMEhISSKoBAAAAAC7n0qQ7Oztb/fv316FDh+Tn56dWrVppxYoV6tq1qw4cOKDVq1dr+vTpOn78uMLCwhQXF6cxY8aYn/fw8NDSpUs1ZMgQ2e12Va9eXfHx8Q7v9QYAAAAAwFVcmnS/9dZb560LCwvT+vXrL7qN8PBwLVu2zJlhAQAAAADgFC5/kBoAAAAAAJUVSTcAAAAAABYh6QYAAAAAwCIk3QAAoJjx48fLzc3NYYmIiDDrT548qYSEBNWqVUu+vr6Ki4tTVlaWwzYyMjIUGxuratWqKTAwUKNGjdKZM2fKuisAALhUuXtPNwAAKB+aN2+u1atXm+uenn8dNowYMUKff/65Fi1aJD8/Pw0dOlS9e/fW119/LUkqKChQbGysgoODtWHDBh06dEj9+/dXlSpVNGXKlDLvCwAArkLSDQAASuTp6ang4OBi5bm5uXrrrbe0YMECde7cWZI0Z84cNWvWTBs3blSHDh20cuVK7dy5U6tXr1ZQUJBat26tSZMmafTo0Ro/fry8vLzKujsAALgEl5cDAIAS7dmzR6GhoWrYsKH69u2rjIwMSVJaWppOnz6t6Ohos21ERITq1aun1NRUSVJqaqpatmypoKAgs01MTIzy8vK0Y8eOsu0IAAAuxJluAABQTFRUlObOnaumTZvq0KFDmjBhgm688UZt375dmZmZ8vLykr+/v8NngoKClJmZKUnKzMx0SLiL6ovqzic/P1/5+fnmel5enpN6BACAa5B0AxdR/8nPnbatn6fGOm1bAGClHj16mP9u1aqVoqKiFB4erg8++EBVq1a1bL9JSUmaMGGCZdsHAKCscXk5AAC4KH9/f1199dXau3evgoODderUKeXk5Di0ycrKMu8BDw4OLvY086L1ku4TL5KYmKjc3FxzOXDggHM7AgBAGSPpBgAAF3Xs2DHt27dPISEhatu2rapUqaKUlBSzfvfu3crIyJDdbpck2e12bdu2TdnZ2WabVatWyWazKTIy8rz78fb2ls1mc1gAAKjIuLwcAAAU8/jjj6tnz54KDw/XwYMHNW7cOHl4eOjee++Vn5+fBg4cqJEjRyogIEA2m03Dhg2T3W5Xhw4dJEndunVTZGSk+vXrp2nTpikzM1NjxoxRQkKCvL29Xdw7AADKDkk3AAAo5n//+5/uvfde/f7776pTp45uuOEGbdy4UXXq1JEkvfTSS3J3d1dcXJzy8/MVExOjmTNnmp/38PDQ0qVLNWTIENntdlWvXl3x8fGaOHGiq7oEAIBLkHQDAIBiFi5ceMF6Hx8fJScnKzk5+bxtwsPDtWzZMmeHBgBAhcI93QAAAAAAWISkGwAAAAAAi5B0AwAAAABgEZJuAAAAAAAsQtINAAAAAIBFSLoBAAAAALAISTcAAAAAABYh6QYAAAAAwCIk3QAAAAAAWISkGwAAAAAAi5B0AwAAAABgEZJuAAAAAAAsQtINAAAAAIBFSLoBAAAAALAISTcAAAAAABYh6QYAAAAAwCIk3QAAAAAAWISkGwAAAAAAi5B0AwAAAABgEZJuAAAAAAAsQtINAAAAAIBFPF0dAAAAAJyr/pOfO21bP0+Nddq2AOBKxJluAAAAAAAsQtINAAAAAIBFXJp0z5o1S61atZLNZpPNZpPdbtcXX3xh1p88eVIJCQmqVauWfH19FRcXp6ysLIdtZGRkKDY2VtWqVVNgYKBGjRqlM2fOlHVXAAAAAAAoxqVJd926dTV16lSlpaVpy5Yt6ty5s+644w7t2LFDkjRixAh99tlnWrRokdavX6+DBw+qd+/e5ucLCgoUGxurU6dOacOGDZo3b57mzp2rsWPHuqpLAAAAAACYXPogtZ49ezqsT548WbNmzdLGjRtVt25dvfXWW1qwYIE6d+4sSZozZ46aNWumjRs3qkOHDlq5cqV27typ1atXKygoSK1bt9akSZM0evRojR8/Xl5
eXq7oFgAAAAAAksrRPd0FBQVauHChjh8/LrvdrrS0NJ0+fVrR0dFmm4iICNWrV0+pqamSpNTUVLVs2VJBQUFmm5iYGOXl5Zlny0uSn5+vvLw8hwUAAAAAAGdzedK9bds2+fr6ytvbWw899JAWL16syMhIZWZmysvLS/7+/g7tg4KClJmZKUnKzMx0SLiL6ovqzicpKUl+fn7mEhYW5txOAQAAAACgcpB0N23aVOnp6dq0aZOGDBmi+Ph47dy509J9JiYmKjc311wOHDhg6f4AAAAAAFcml97TLUleXl5q3LixJKlt27bavHmzXn75Zd1zzz06deqUcnJyHM52Z2VlKTg4WJIUHBysb775xmF7RU83L2pTEm9vb3l7ezu5JwAAAAAAOHL5me5zFRYWKj8/X23btlWVKlWUkpJi1u3evVsZGRmy2+2SJLvdrm3btik7O9tss2rVKtlsNkVGRpZ57AAAAAAA/J1Lz3QnJiaqR48eqlevno4ePaoFCxZo3bp1WrFihfz8/DRw4ECNHDlSAQEBstlsGjZsmOx2uzp06CBJ6tatmyIjI9WvXz9NmzZNmZmZGjNmjBISEjiTDQAAAABwOZcm3dnZ2erfv78OHTokPz8/tWrVSitWrFDXrl0lSS+99JLc3d0VFxen/Px8xcTEaObMmebnPTw8tHTpUg0ZMkR2u13Vq1dXfHy8Jk6c6KouAQAAAABgcmnS/dZbb12w3sfHR8nJyUpOTj5vm/DwcC1btszZoQEAAAAA8I+Vu3u6AQAAAACoLEi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAOCipk6dKjc3Nw0fPtwsO3nypBISElSrVi35+voqLi5OWVlZDp/LyMhQbGysqlWrpsDAQI0aNUpnzpwp4+gBAHAdkm4AAHBBmzdv1muvvaZWrVo5lI8YMUKfffaZFi1apPXr1+vgwYPq3bu3WV9QUKDY2FidOnVKGzZs0Lx58zR37lyNHTu2rLsAAIDLkHQDAIDzOnbsmPr27as33nhDNWvWNMtzc3P11ltv6cUXX1Tnzp3Vtm1bzZkzRxs2bNDGjRslSStXrtTOnTv13nvvqXXr1urRo4cmTZqk5ORknTp1ylVdAgCgTJF0AwCA80pISFBsbKyio6MdytPS0nT69GmH8oiICNWrV0+pqamSpNTUVLVs2VJBQUFmm5iYGOXl5WnHjh0l7i8/P195eXkOCwAAFZmnqwMAAADl08KFC/Xtt99q8+bNxeoyMzPl5eUlf39/h/KgoCBlZmaabf6ecBfVF9WVJCkpSRMmTHBC9AAAlA+c6QYAAMUcOHBAjz76qObPny8fH58y229iYqJyc3PN5cCBA2W2bwAArEDSDQAAiklLS1N2drauvfZaeXp6ytPTU+vXr9eMGTPk6empoKAgnTp1Sjk5OQ6fy8rKUnBwsCQpODi42NPMi9aL2pzL29tbNpvNYQEAoCIj6QYAAMV06dJF27ZtU3p6urm0a9dOffv2Nf9dpUoVpaSkmJ/ZvXu3MjIyZLfbJUl2u13btm1Tdna22WbVqlWy2WyKjIws8z4BAOAK3NMNAACKqVGjhlq0aOFQVr16ddWqVcssHzhwoEaOHKmAgADZbDYNGzZMdrtdHTp0kCR169ZNkZGR6tevn6ZNm6bMzEyNGTNGCQkJ8vb2LvM+AQDgCiTdAADgsrz00ktyd3dXXFyc8vPzFRMTo5kzZ5r1Hh4eWrp0qYYMGSK73a7q1asrPj5eEydOdGHUAACULZJuAABwSdatW+ew7uPjo+TkZCUnJ5/3M+Hh4Vq2bJnFkQEAUH5xTzcAAAAAABYh6QYAAAAAwCIk3QAAAAAAWISkGwAAAAAAi5B0AwAAAABgEZJuAAAAAAAsQtINAAAAAIBFXJp0JyUlqX379qpRo4YCAwPVq1cv7d6926FNp06d5Obm5rA89NBDDm0yMjIUGxuratWqKTAwUKNGjdKZM2fKsisAAAAAABTj6cqdr1+/XgkJCWrfvr3OnDmjp556St26ddPOnTtVvXp1s92gQYM0ceJEc71atWrmvwsKChQbG6vg4GBt2LBBhw4dUv/+/VWlShVNmTKlTPsDAAAAAMDfuTTpXr58ucP63LlzFRgYqLS0NN10001mebVq1RQcHFziNlauXKmdO3dq9erVCgoKUuvWrTVp0iSNHj1a48ePl5eXl6V9AAAAAADgfMrVPd25ubmSpICAAIfy+fPnq3bt2mrRooUSExP1559/mnWpqalq2bKlgoKCzLKYmBjl5eVpx44dJe4nPz9feXl5DgsAAAAAAM7m0jPdf1dYWKjhw4erY8eOatGihVn+73//W+Hh4QoNDdXWrVs1evRo7d69Wx9//LEkKTMz0yHhlmSuZ2ZmlrivpKQkTZgwwaKeAAAAAABwVrlJuhMSErR9+3Z99dVXDuWDBw82/92yZUuFhISoS5cu2rdvnxo1anRZ+0pMTNTIkSPN9by8PIWFhV1e4AAAAAAAnEe5uLx86NChWrp0qdauXau6detesG1UVJQkae/evZKk4OBgZWVlObQpWj/ffeDe3t6y2WwOCwAAAAAAzubSM92GYWjYsGFavHix1q1bpwYNGlz0M+np6ZKkkJAQSZLdbtfkyZOVnZ2twMBASdKqVatks9kUGRlpWewAAODKVv/Jz10dAgCgAnBp0p2QkKAFCxbok08+UY0aNcx7sP38/FS1alXt27dPCxYs0K233qpatWpp69atGjFihG666Sa1atVKktStWzdFRkaqX79+mjZtmjIzMzVmzBglJCTI29vbld0DAAAAAFzhXHp5+axZs5Sbm6tOnTopJCTEXP773/9Kkry8vLR69Wp169ZNEREReuyxxxQXF6fPPvvM3IaHh4eWLl0qDw8P2e123Xffferfv7/De70BAAAAAHAFl19efiFhYWFav379RbcTHh6uZcuWOSssAAAAAACcolw8SA0AAAAAgMqIpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARVz69HJUbPWf/NzVIQAAAABAucaZbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwyGUl3Q0bNtTvv/9erDwnJ0cNGzb8x0EBAAAAAFAZXFbS/fPPP6ugoKBYeX5+vn799dd/HBQAAAAAAJWBZ2kaf/rpp+a/V6xYIT8/P3O9oKBAKSkpql+/vtOCAwAAAACgIitV0t2rVy9Jkpubm+Lj4x3qqlSpovr16+uFF15wWnAAAAAAAFRkpUq6CwsLJU
kNGjTQ5s2bVbt2bUuCAgAAAACgMihV0l1k//79zo4DAAAAAIBK57KSbklKSUlRSkqKsrOzzTPgRd5+++1/HBgAAAAAABXdZSXdEyZM0MSJE9WuXTuFhITIzc3N2XEBAAAAAFDhXVbSPXv2bM2dO1f9+vVzdjwAAAAAAFQal/We7lOnTun66693diwAAAAAAFQql5V0P/jgg1qwYIGzYwEAAAAAoFK5rMvLT548qddff12rV69Wq1atVKVKFYf6F1980SnBAQAAAABQkV1W0r1161a1bt1akrR9+3aHOh6qBgAAAADAWZeVdK9du9bZcQBXhPpPfu7U7f08Ndap2wMAAADgXJd1TzcAAAAAALi4yzrTfcstt1zwMvI1a9ZcdkAAAAAAAFQWl5V0F93PXeT06dNKT0/X9u3bFR8f74y4AAAAAACo8C4r6X7ppZdKLB8/fryOHTv2jwICAAAAAKCycOo93ffdd5/efvttZ24SAAAAAIAKy6lJd2pqqnx8fJy5SQAA4AKzZs1Sq1atZLPZZLPZZLfb9cUXX5j1J0+eVEJCgmrVqiVfX1/FxcUpKyvLYRsZGRmKjY1VtWrVFBgYqFGjRunMmTNl3RUAAFzqsi4v7927t8O6YRg6dOiQtmzZomeeecYpgQEAANepW7eupk6dqiZNmsgwDM2bN0933HGHvvvuOzVv3lwjRozQ559/rkWLFsnPz09Dhw5V79699fXXX0uSCgoKFBsbq+DgYG3YsEGHDh1S//79VaVKFU2ZMsXFvQMAoOxc1pluPz8/hyUgIECdOnXSsmXLNG7cuEveTlJSktq3b68aNWooMDBQvXr10u7dux3a8E06AABlr2fPnrr11lvVpEkTXX311Zo8ebJ8fX21ceNG5ebm6q233tKLL76ozp07q23btpozZ442bNigjRs3SpJWrlypnTt36r333lPr1q3Vo0cPTZo0ScnJyTp16pSLewcAQNm5rDPdc+bMccrO169fr4SEBLVv315nzpzRU089pW7dumnnzp2qXr26JPFNOgAALlZQUKBFixbp+PHjstvtSktL0+nTpxUdHW22iYiIUL169ZSamqoOHTooNTVVLVu2VFBQkNkmJiZGQ4YM0Y4dO9SmTRtXdAUAgDJ3WUl3kbS0NP3www+SpObNm5d6Al2+fLnD+ty5cxUYGKi0tDTddNNN5jfpCxYsUOfOnSWdTfibNWumjRs3qkOHDuY36atXr1ZQUJBat26tSZMmafTo0Ro/fry8vLz+SRcBALhibdu2TXa7XSdPnpSvr68WL16syMhIpaeny8vLS/7+/g7tg4KClJmZKUnKzMx0SLiL6ovqzic/P1/5+fnmel5enpN6AwCAa1zW5eXZ2dnq3Lmz2rdvr0ceeUSPPPKI2rZtqy5duujw4cOXHUxubq4kKSAgQJIu+k26pPN+k56Xl6cdO3aUuJ/8/Hzl5eU5LAAAwFHTpk2Vnp6uTZs2aciQIYqPj9fOnTst3WdSUpLDLWxhYWGW7g8AAKtdVtI9bNgwHT16VDt27NCRI0d05MgRbd++XXl5eXrkkUcuK5DCwkINHz5cHTt2VIsWLSSd/Sbcim/SmdABALg4Ly8vNW7cWG3btlVSUpKuueYavfzyywoODtapU6eUk5Pj0D4rK0vBwcGSpODg4GLPYClaL2pTksTEROXm5prLgQMHnNspAADK2GUl3cuXL9fMmTPVrFkzsywyMlLJyckOrxMpjYSEBG3fvl0LFy68rM+XBhM6AAClV1hYqPz8fLVt21ZVqlRRSkqKWbd7925lZGTIbrdLkux2u7Zt26bs7GyzzapVq2Sz2RQZGXnefXh7e5uvKStaAACoyC7rnu7CwkJVqVKlWHmVKlVUWFhY6u0NHTpUS5cu1Zdffqm6deua5X//Jv3vZ7vP/Sb9m2++cdjexb5J9/b2lre3d6njBADgSpGYmKgePXqoXr16Onr0qBYsWKB169ZpxYoV8vPz08CBAzVy5EgFBATIZrNp2LBhstvt6tChgySpW7duioyMVL9+/TRt2jRlZmZqzJgxSkhIYA4GAFxRLutMd+fOnfXoo4/q4MGDZtmvv/6qESNGqEuXLpe8HcMwNHToUC1evFhr1qxRgwYNHOqt/CYdAACcX3Z2tvr376+mTZuqS5cu2rx5s1asWKGuXbtKkl566SXddtttiouL00033aTg4GB9/PHH5uc9PDy0dOlSeXh4yG6367777lP//v01ceJEV3UJAACXcDMMwyjthw4cOKDbb79dO3bsMO+HPnDggFq0aKFPP/3U4Wz1hTz88MNasGCBPvnkEzVt2tQs9/PzU9WqVSVJQ4YM0bJlyzR37lzzm3RJ2rBhg6SzrzFp3bq1QkNDzW/S+/XrpwcffPCSXxmWl5cnPz8/5ebmchlbKdR/8nNXh3DF+3lqrKtDAGCxK32OKs/9v1LmQeYaACjZpc5Rl3V5eVhYmL799lutXr1au3btkiQ1a9bM4Snjl2LWrFmSpE6dOjmUz5kzRwMGDJB09pt0d3d3xcXFKT8/XzExMZo5c6bZtuib9CFDhshut6t69eqKj4/nm3QAAAAAgMuVKules2aNhg4dqo0bN8pms6lr167mZWa5ublq3ry5Zs+erRtvvPGStncpJ9l9fHyUnJys5OTk87YJDw/XsmXLLq0TAAAAAACUkVLd0z19+nQNGjSoxFPnfn5++s9//qMXX3zRacEBAAAAAFCRlSrp/v7779W9e/fz1nfr1k1paWn/OCgAAAAAACqDUiXdWVlZJb4qrIinp6cOHz78j4MCAAAAAKAyKFXSfdVVV2n79u3nrd+6datCQkL+cVAAAAAAAFQGpUq6b731Vj3zzDM6efJksboTJ05o3Lhxuu2225wWHAAAAAAAFVmpnl4+ZswYffzxx7r66qs1dOhQ893au3btUnJysgoKCvT0009bEigAAAAAABVNqZLuoKAgbdiwQUOGDFFiYqL5yi83NzfFxMQoOTlZQUFBlgQKAAAAAEBFU6qkW/rrndh//PGH9u7dK8Mw1KRJE9WsWdOK+AAAAAAAqLBKnXQXqVmzptq3b+/MWAAAAAAAqFRK9SA1AAAAAABw6Ui6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALOLp6gAAAABQftV/8nOnbu/nqbFO3R4AlHec6QYAAAAAwCIk3QAAAAAAWISkGwAAAAAAi5B0AwAAAABgEZcm3V9++aV69uyp0NBQubm5acmSJQ71AwYMkJubm8PSvXt3hzZHjhxR3759ZbPZ5O/vr4EDB+rYsWNl2AsAAAAAAErm0qT7+PHjuuaaa
5ScnHzeNt27d9ehQ4fM5f3333eo79u3r3bs2KFVq1Zp6dKl+vLLLzV48GCrQwcAAAAA4KJc+sqwHj16qEePHhds4+3treDg4BLrfvjhBy1fvlybN29Wu3btJEmvvPKKbr31Vj3//PMKDQ11eswAAAAAAFyqcn9P97p16xQYGKimTZtqyJAh+v3338261NRU+fv7mwm3JEVHR8vd3V2bNm067zbz8/OVl5fnsAAAAAAA4GzlOunu3r273nnnHaWkpOi5557T+vXr1aNHDxUUFEiSMjMzFRgY6PAZT09PBQQEKDMz87zbTUpKkp+fn7mEhYVZ2g8AAAAAwJXJpZeXX0yfPn3Mf7ds2VKtWrVSo0aNtG7dOnXp0uWyt5uYmKiRI0ea63l5eSTeAAAAAACnK9dnus/VsGFD1a5dW3v37pUkBQcHKzs726HNmTNndOTIkfPeBy6dvU/cZrM5LAAAAAAAOFuFSrr/97//6ffff1dISIgkyW63KycnR2lpaWabNWvWqLCwUFFRUa4KEwAAAAAASS6+vPzYsWPmWWtJ2r9/v9LT0xUQEKCAgABNmDBBcXFxCg4O1r59+/TEE0+ocePGiomJkSQ1a9ZM3bt316BBgzR79mydPn1aQ4cOVZ8+fXhyOQAAAADA5VyadG/ZskW33HKLuV50n3V8fLxmzZqlrVu3at68ecrJyVFoaKi6deumSZMmydvb2/zM/PnzNXToUHXp0kXu7u6Ki4vTjBkzyrwvgCvUf/Jzp23r56mxTtsWAAAAgLNcmnR36tRJhmGct37FihUX3UZAQIAWLFjgzLAAAAAAAHCKCnVPNwAAAAAAFQlJNwAAAAAAFiHpBgAAAADAIiTdAAAAAABYhKQbAAAUk5SUpPbt26tGjRoKDAxUr169tHv3boc2J0+eVEJCgmrVqiVfX1/FxcUpKyvLoU1GRoZiY2NVrVo1BQYGatSoUTpz5kxZdgUAAJci6QYAAMWsX79eCQkJ2rhxo1atWqXTp0+rW7duOn78uNlmxIgR+uyzz7Ro0SKtX79eBw8eVO/evc36goICxcbG6tSpU9qwYYPmzZunuXPnauzYsa7oEgAALuHSV4YBAIDyafny5Q7rc+fOVWBgoNLS0nTTTTcpNzdXb731lhYsWKDOnTtLkubMmaNmzZpp48aN6tChg1auXKmdO3dq9erVCgoKUuvWrTVp0iSNHj1a48ePl5eXlyu6BgBAmeJMNwAAuKjc3FxJUkBAgCQpLS1Np0+fVnR0tNkmIiJC9erVU2pqqiQpNTVVLVu2VFBQkNkmJiZGeXl52rFjRxlGDwCA63CmGwAAXFBhYaGGDx+ujh07qkWLFpKkzMxMeXl5yd/f36FtUFCQMjMzzTZ/T7iL6ovqSpKfn6/8/HxzPS8vz1ndAADAJTjTDQAALighIUHbt2/XwoULLd9XUlKS/Pz8zCUsLMzyfQIAYCWSbgAAcF5Dhw7V0qVLtXbtWtWtW9csDw4O1qlTp5STk+PQPisrS8HBwWabc59mXrRe1OZciYmJys3NNZcDBw44sTcAAJQ9km4AAFCMYRgaOnSoFi9erDVr1qhBgwYO9W3btlWVKlWUkpJilu3evVsZGRmy2+2SJLvdrm3btik7O9tss2rVKtlsNkVGRpa4X29vb9lsNocFAICKjHu6AQBAMQkJCVqwYIE++eQT1ahRw7wH28/PT1WrVpWfn58GDhyokSNHKiAgQDabTcOGDZPdbleHDh0kSd26dVNkZKT69eunadOmKTMzU2PGjFFCQoK8vb1d2T0AAMoMSTcAAChm1qxZkqROnTo5lM+ZM0cDBgyQJL300ktyd3dXXFyc8vPzFRMTo5kzZ5ptPTw8tHTpUg0ZMkR2u13Vq1dXfHy8Jk6cWFbdAADA5Ui6AQBAMYZhXLSNj4+PkpOTlZycfN424eHhWrZsmTNDAwCgQuGebgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAiJN0AAAAAAFiEpBsAAAAAAIuQdAMAAAAAYBGSbgAAAAAALELSDQAAAACARUi6AQAAAACwCEk3AAAAAAAWIekGAAAAAMAinq4OAGWr/pOfuzoEAAAAALhicKYbAAAAAACLkHQDAAAAAGARkm4AAAAAACxC0g0AAAAAgEVIugEAAAAAsAhJNwAAAAAAFiHpBgAAAADAIrynGwAAAGWm/pOfO21bP0+Nddq2AMAqLj3T/eWXX6pnz54KDQ2Vm5ublixZ4lBvGIbGjh2rkJAQVa1aVdHR0dqzZ49DmyNHjqhv376y2Wzy9/fXwIEDdezYsTLsBQAAAAAAJXNp0n38+HFdc801Sk5OLrF+2rRpmjFjhmbPnq1NmzapevXqiomJ0cmTJ802ffv21Y4dO7Rq1SotXbpUX375pQYPHlxWXQAAAAAA4Lxcenl5jx491KNHjxLrDMPQ9OnTNWbMGN1xxx2SpHfeeUdBQUFasmSJ+vTpox9++EHLly/X5s2b1a5dO0nSK6+8oltvvVXPP/+8QkNDy6wvAAAAAACcq9w+SG3//v3KzMxUdHS0Webn56eoqCilpqZKklJTU+Xv728m3JIUHR0td3d3bdq0qcxjBgAAAADg78rtg9QyMzMlSUFBQQ7lQUFBZl1mZqYCAwMd6j09PRUQEGC2KUl+fr7y8/PN9by8PGeFDQAAAACAqdye6bZSUlKS/Pz8zCUsLMzVIQEAAAAAKqFym3QHBwdLkrKyshzKs7KyzLrg4GBlZ2c71J85c0ZHjhwx25QkMTFRubm55nLgwAEnRw8AAAAAQDlOuhs0aKDg4GClpKSYZXl5edq0aZPsdrskyW63KycnR2lpaWabNWvWqLCwUFFRUefdtre3t2w2m8MCAAAAAICzufSe7mPHjmnv3r3m+v79+5Wenq6AgADVq1dPw4cP17PPPqsmTZqoQYMGeuaZZxQaGqpevXpJkpo1a6bu3btr0KBBmj17tk6fPq2hQ4eqT58+PLkcAAAAAOByLk26t2zZoltuucVcHzlypCQpPj5ec+fO1RNPPKHjx49r8ODBysnJ0Q033KDly5fLx8fH/Mz8+fM1dOhQdenSRe7u7oqLi9OMGTPKvC8AAAAAAJzLpUl3p06dZBjGeevd3Nw0ceJETZw48bxtAgICtGDBAivCAwAAAADgHym393QDAAAAAFDRkXQDAAAAAGARkm4AAAAAACxC0g0AAAAAgEVIugEAAAAAsAhJNwAAAAAAFiHpBgAAAADAIiTdAAAAAABYhKQbAAAAAACLkHQDAAAAAGARkm4AAAAAACxC0g0AAAAAgEVIugEAAAAAsAhJNwAAAAAAFiHpBgAAAADAIiTdAAAAAABYhKQbAAAAAACLkHQDAAAAAGARkm4AAAAAACxC0g0AAAAAgEVIugEAAAAAsAhJNwAAAAAAFiHpBgAAxXz55Zfq2bOnQkND5ebmpiVLljjUG4ahsWPHKiQkRFWrVlV0dLT27Nnj0ObIkSPq27evbDab/P39NXDgQB07dqwMewEAgOuRdAMAgGKOHz+u
<remaining base64 PNG data omitted: two-panel histogram of the unfiltered SciTLDR_train Text and Summary length distributions (panels SciTLDR_train_Text Length / SciTLDR_train_Summary Length, x-axis Length, y-axis Count)>\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, axes = plt.subplots(1, 2, figsize=(10, 5) )\n", + "axes[0].hist(Len_SciTLDR_train_Textt, bins = 20 )\n", + "axes[0].set_title(\"SciTLDR_train_Text Length\")\n", + "axes[0].set_xlabel(\"Length\")\n", + "axes[0].set_ylabel(\"Count\")\n", + "\n", + "axes[1].hist(Len_SciTLDR_train_Summaryy, bins = 20 )\n", + "axes[1].set_title(\"SciTLDR_train_Summary Length\")\n", + "axes[1].set_xlabel(\"Length\")\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ac07292a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 561, + "id": "1c5af909", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train.drop(NN_SciTLDR_train[NN_SciTLDR_train.Len_SciTLDR_train_Text < 50].index, inplace=True)\n", + "NN_SciTLDR_train.drop(NN_SciTLDR_train[NN_SciTLDR_train.Len_SciTLDR_train_Text > 400].index, inplace=True)\n", + "\n", + "NN_SciTLDR_train.drop(NN_SciTLDR_train[NN_SciTLDR_train.Len_SciTLDR_train_Summary < 10].index, inplace=True)\n", + "NN_SciTLDR_train.drop(NN_SciTLDR_train[NN_SciTLDR_train.Len_SciTLDR_train_Summary > 50].index, inplace=True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 562, + "id": "4e2d8173", + "metadata": {}, + "outputs": [], + "source": [ + "NeW_Len_SciTLDR_train_Text = []\n", + "NeW_Len_SciTLDR_train_Text = NN_SciTLDR_train['Len_SciTLDR_train_Text'].values.tolist()\n", + "\n", + "NeW_Len_SciTLDR_train_Summary = []\n", + "NeW_Len_SciTLDR_train_Summary = NN_SciTLDR_train['Len_SciTLDR_train_Summary'].values.tolist()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 564, + "id": "459df580", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA90AAAHqCAYAAAAZLi26AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABdyklEQVR4nO3de1yUZf7/8fcAAh44iAoDiYhHPGGkRqylpCYiWaZtaaZYppuhrdqRMg/UhmvH1UzzW2nbam62pWWl4XlNNLXIU7rialoKmgZ4RIT790fL/BoBRZybGeD1fDzux4O5ruu+5nPdM8w1n7lPFsMwDAEAAAAAAIdzc3YAAAAAAABUVyTdAAAAAACYhKQbAAAAAACTkHQDAAAAAGASkm4AAAAAAExC0g0AAAAAgElIugEAAAAAMAlJNwAAAAAAJiHpBgAAAADAJCTdqHFiY2MVGxvr7DCqrLVr18pisWjt2rXODgXldPDgQVksFr388svODgVANcA8em2YR1GZpkyZIovFol9++cXZodRoJN2oEnbs2KG7775bYWFh8vb21nXXXafbbrtNM2fOvKZ+Y2NjZbFYrrhMmTJFktS0aVPdfvvtl+1z+PDhduvWq1dPzZo10913361//etfKioqumIctWvXVmRkpF5//fVS21/Jm2++qfnz51/1es40f/78cr0WTZs2ddhzfvHFF7bXtjxiY2PVvn17hz2/o13teADUHMyjV6cqzqPFioqK9Pe//13R0dEKCAiQj4+PWrVqpWHDhmnTpk3ODq9Kqgo/Xr/44otasmSJs8NAGTycHQBwJRs3btStt96qJk2aaOTIkbJarTp8+LA2bdqkv/3tbxo7duxV9ffVV1/Z/n722Wf10EMP2R5v2bJFM2bM0DPPPKM2bdrYyiMjI6/qOby8vPT2229Lks6dO6cff/xRn332me6++27FxsZq6dKl8vX1tVuncePGSk1NlST98ssvWrhwocaPH6/jx4/rL3/5y1U9/5tvvqmGDRtq+PDhV7VeeXTr1k3nzp2Tp6enw/t9//337coeeugh3XjjjRo1apStrF69eg57zi+++EKzZs2qNolqdRsPAMdgHq0Z82ixRx99VLNmzdKdd96pIUOGyMPDQ3v37tWXX36pZs2a6aabbjLleeFcL774ou6++27179/f2aGgFCTdcHl/+ctf5Ofnpy1btsjf39+u7tixY1fd3+8nudtuu82uztvbWzNmzNBtt912TYfOeXh46P7777cre+GFFzRt2jQlJydr5MiR+uc//2lX7+fnZ7fOww8/rIiICM2cOVMpKSlyd3evcDyXc+bMGdWtW7fc7d3c3OTt7e3wOJo1a6ZmzZrZlT388MNq1qxZiW0JACg/5tGaMY9KUnZ2tt58802NHDlSc+fOtat7/fXXdfz4cVOe15WdP39enp6ecnPjAF84D+8+uLz9+/erXbt2Jb4oSFJgYGCJsn/84x+68cYbVadOHdWvX1/dunWz+1XemeeiPf300+rdu7cWL16s//znP5dt6+3trS5duujUqVNX9aWoadOm2rVrl9atW2c7zK54vMWHcK9bt06PPPKIAgMD1bhxY0nSjz/+qEceeUStW7dW7dq11aBBA/3xj3/UwYMH7fov7Vy04sOud+/erVtvvVV16tTRddddp+nTp5c77vL6+eef9eCDDyooKEheXl5q166d3n33XVv9uXPnFBERoYiICJ07d85WfvLkSQUHB+sPf/iDCgsLNXz4cM2aNUuS7A5JdIQvv/xSt9xyi+rWrSsfHx8lJCRo165ddm2GDx+uevXq6eeff1b//v1Vr149NWrUSI8//rgKCwvt2p44cUJDhw6Vr6+v/P39lZiYqO+//14Wi8V2+GN5xzN37lw1b95cXl5e6tKli7Zs2eKQMQNwXcyjNWcePXDggAzDUNeuXUvUWSwWu9e7+FzfSxWP8fdxF58WsHbtWnXu3Fm1a9dWhw4dbGP4+OOP1aFDB3l7e6tTp0767rvv7PosnvMOHTqk22+/XfXq1dN1111nm7d27NihHj16qG7dugoLC9PChQvt1j958qQef/xxdejQQfXq1ZOvr6/i4+P1/fff27Ur3raLFi3SxIkTdd1116lOnTrKyMiQxWLRa6+9VmK8GzdulMVi0QcffHD5jVsO+fn5mjx5slq0aCEvLy+FhobqySefVH5+vl07i8WiMWPGaMmSJWrfvr3t+8zy5ctL9Fm8zb29vdW8eXO99dZbJV47i8WiM2fO6L333rO9Zy89SiMnJ0fDhw+Xv7+//Pz89MADD+js2bPXPGaUD3u64fLCwsKUnp6unTt3XvF82qlTp2rKlCn6wx/+oJSUFHl6emrz5s1avXq1evfuXUkRX97QoUP11VdfKS0tTa1atbps2+JziEr7olSW119/XWPHjlW9evX07LPPSpKCgoLs2jzyyCNq1KiRJk2apDNnzkj67ZDAjRs3atCgQWrcuLEOHjyo2bNnKzY2Vrt371adOnUu+7y//vqr+vTpowEDBuiee+7RRx99pKeeekodOnRQfHx8ueO/nOzsbN100022yapRo0b68ssvNWLECOXl5WncuHGqXbu23nvvPXXt2lXPPvusXn31VUlSUlKScnNzNX/+fLm7u+tPf/qTjhw5orS0tBKHtV+L999/X4mJiYqLi9Nf//pXnT17VrNnz9bNN9+s7777zu6c9MLCQsXFxSk6Olovv/yyVq5cqVdeeUXNmzfX6NGjJf12bl6/fv30zTffaPTo0YqIiNDSpUuVmJho97zlGc/ChQt16tQp/elPf5LFYtH06dM1YMAA/fe//1WtWrUctg0AuBbm0Zozj4aFhUmSFi9erD/+8Y9XfM6rkZmZqfvuu09/+tOfdP/99+vll19Wv379NGfOHD3zzDN65JFHJEmpqam65557tHfvXru9y4WFhYqPj1e3bt00ffp0LViwQGPGjFHdunX17LPPasiQIRowYIDmzJmjYcOGKSYmRuHh4ZKk//73v1qyZIn++Mc/Kjw8XNnZ2XrrrbfUvXt37d69WyEhIXaxPv/88/L09NTjjz+u/Px8RUREqGvXrlqwYIHGjx9v13bBggXy8fHRnXfeeU3bp6ioSHfccYc2bNigUaNGqU2bNtqxY4dee+01/ec//ylxvvWGDRv08ccf65FHHpGPj49mzJihgQMH6tChQ2rQoIEk6bvvvlOfPn0UHBysqVOnqrCwUCkpKWrUqJFdX++//36JU/KaN29u1+aee+5ReHi4UlNT9e233+rtt99WYGCg/vrXv17TuFFOBuDivvrqK8Pd3d1wd3c3YmJijCeffNJYsWKFceHCBbt2+/btM9zc3Iy77rrLKCwstKsrKiqy/d29e3eje/fupT7X4sWLDUnGmjVrSq0PCwszEhISLhtvYmKiUbdu3TLrv/vuO0OSMX78eLuYIiIijOPHjxvHjx839uzZYzzxxBOGpCs+X2natWtX6hjnzZtnSDJuvvlm4+LFi3Z1Z8+eLdE+PT3dkGT8/e9/t5WtWbOmxDbq3r17iXb5+f
mG1Wo1Bg4ceNXxF6tbt66RmJhoezxixAgjODjY+OWXX+zaDRo0yPDz87MbQ3JysuHm5masX7/e9rq+/vrrduslJSUZV/Mx2L17d6Ndu3Zl1p86dcrw9/c3Ro4caVeelZVl+Pn52ZUnJiYakoyUlBS7tlFRUUanTp1sj//1r3+ViL2wsNDo0aOHIcmYN2/eFcdz4MABQ5LRoEED4+TJk7bypUuXGpKMzz777MqDB1BlMY/WrHl02LBhhiSjfv36xl133WW8/PLLxg8//FCi3eTJk0udM4rHeODAAVtZWFiYIcnYuHGjrWzFihWGJKN27drGjz/+aCt/6623SoyveM578cUXbWW//vqrUbt2bcNisRiLFi2yle/Zs8eQZEyePNlWdv78+RLvyQMHDhheXl5282jxtm3WrFmJ16M4rt9viwsXLhgNGza0+65RmuJ59KWXXiqzzfvvv2+4ubkZ//73v+3K58yZY0gyvv76a1uZJMPT09PIzMy0lX3//feGJGPmzJm2sn79+hl16tQxfv75Z1vZvn37DA8PjxKv3aXfmYoVv84PPvigXfldd91lNGjQ4LLjhuNweDlc3m233ab09HTdcccd+v777zV9+nTFxcXpuuuu06effmprt2TJEhUVFWnSpEklzttx1GHDjlB8IbBTp07Zle/Zs0eNGjVSo0aNFBERoZdeekl33HGHKVdPHTlyZIlz22rXrm37u6CgQCdOnFCLFi3k7++vb7/99op91qtXz+5cOk9PT914443673//65CYDcPQv/71L/Xr10+GYeiXX36xLXFxccrNzbWLc8qUKWrXrp0SExP1yCOPqHv37nr00UcdEktZ0tLSlJOTo8GDB9vF5+7urujoaK1Zs6bEOg8//LDd41tuucVumy1fvly1atXSyJEjbWVubm5KSkq66vjuvfde1a9f3+65JDnsNQLgmphH5zs8BleeR+fNm6c33nhD4eHh+uSTT/T444+rTZs26tmzp37++eer6uv32rZtq5iYGNvj6OhoSVKPHj3UpEmTEuWlxf37i+75+/urdevWqlu3ru655x5beevWreXv72+3vpeXl+09WVhYqBMnTqhevXpq3bp1qds2MTHR7vWQftvT6+3trQULFtjKVqxYoV9++cUh145ZvHix2rRpo4iICLvvAD169JCkEt8BevXqZbc3OjIyUr6+vrZxFxYWauXKlerfv7/dnvwWLVpU6AjC0r5vnDhxQnl5eVfdF64eSTeqhC5duujjjz/Wr7/+qm+++UbJyck6deqU7r77bu3evVvSb+esubm5qW3btk6O9vJOnz4tSfLx8bErb9q0qdLS0rRixQq9+eabuu6663T8+HFTLrZSfLjW7507d06TJk1SaGiovLy81LBhQzVq1Eg5OTnKzc29Yp+NGzcu8aWsfv36+vXXXx0S8/Hjx5WTk6O5c+favlQVLw888IAk+wsCeXp66t1339WBAwd06tQpzZs3z/Qvjfv27ZP02xeQS2P86quvSpxT6O3tXeIQsUu32Y8//qjg4OAShwi2aNHiquP7/Zei4ueS5LDXCIDrYh51LFeeR4t/mN22bZt++eUXLV26VPHx8Vq9erUGDRp0VX393qVziJ+fnyQpNDS01PJL4y5tzvPz8yt13H5+fnbrFxUV6bXXXlPLli3ttu327dtL3balvT7+/v7q16+f3fniCxYs0HXXXWdLjK/Fvn37tGvXrhLzf/EpEJd+B7h0e0r2r/exY8d07ty5Uud7vgNUPZzTjSrF09NTXbp0UZcuXdSqVSs98MADWrx4sSZPnuzs0Mpt586dkkp+YNatW1e9evWyPe7atatuuOEGPfPMM5oxY4ZDY7j0119JGjt2rObNm6dx48YpJiZGfn5+slgsGjRoULnucVrWVWENw7jmeCXZYrj//vtLnM9c7NJb0qxYsULSb1cu3bdvX6mTsCMVx/j+++/LarWWqPfwsP/INetKumUx+zUC4PqYRx2jqsyjDRo00B133KE77rhDsbGxWrdunX788UeFhYWV+UP0pRfzvFJ85Y37WtZ/8cUX9dxzz+nBBx/U888/r4CAALm5uWncuHGlbtvSXh9JGjZsmBYvXqyNGzeqQ4cO+vTTT/XII4845MrmRUVF6tChg+1aMpe69MeJyp6T+Q7gXCTdqLI6d+4sSTp69Kik3y4YUVRUpN27d+v66693YmSX9/7778tisZS4zcqlIiMjdf/99+utt97S448/XuovomWpyB7djz76SImJiXrllVdsZefPn1dOTs5V92WGRo0aycfHR4WFhXZfqsqyfft2paSk6IEHHlBGRoYeeugh7dixw/YLvOT4wyWLDxMLDAwsV4zlERYWpjVr1ujs2bN2e7szMzNLtHWlwz8BuD7m0bJVx3m0c+fOWrdunY4ePaqwsDDbns6cnBy7C839+OOPToqwbB999JFuvfVWvfPOO3blOTk5atiwYbn76dOnjxo1aqQFCxYoOjpaZ8+e1dChQx0SY/PmzfX999+rZ8+eDpmPAwMD5e3tXep8z3eAqofDy+Hy1qxZU+qvcF988YWk3879kaT+/fvLzc1NKSkpJX71dJVf8aZNm6avvvpK9957r1q2bHnF9k8++aQKCgrK/NW0LHXr1r3qSd7d3b3Edpo5c2aZv3hXNnd3dw0cOFD/+te/bHs5fu/39x4tKCjQ8OHDFRISor/97W+aP3++srOzS1yxtPi+qo76QhQXFydfX1+9+OKLKigouGyMV9NnQUGB/u///s9WVlRUZLvNyu85ejwAqgfm0Zozj2ZlZdlOF/i9CxcuaNWqVXJzc7MdIVD8Q/H69ett7YpvO+VqStu2ixcvvupz1D08PDR48GB9+OGHmj9/vjp06FDiKLmKuueee/Tzzz/bzdfFzp07Z7vKfXm5u7urV69eWrJkiY4cOWIrz8zM1JdfflmifUXes6g87OmGyxs7dqzOnj2ru+66SxEREbpw4YI2btyof/7zn2ratKntfN4WLVro2Wef1fPPP69bbrlFAwYMkJeXl7Zs2aKQkBClpqY6JJ7MzEy98MILJcqjoqKUkJAgSbp48aL+8Y9/SPrtV+4ff/xRn376qbZv365bb71Vc+fOLddztW3bVn379tXbb7+t5557znYLiSvp1KmTZs+erRdeeEEtWrRQYGDgFc9Xuv322/X+++/Lz89Pbdu2VXp6ulauXFnu56wM06ZN05o1axQdHa2RI0eqbdu2OnnypL799lutXLlSJ0+elCS98MILysjI0KpVq+Tj46PIyEhNmjRJEydO1N13362+fftK+m07SdKjjz6quLg4ubu7X/F8t+PHj5f6+oeHh2vIkCGaPXu2hg4dqhtuuEGDBg1So0aNdOjQIX3++efq2rWr3njjjasac//+/XXjjTfqscceU2ZmpiIiIvTpp5/axvr7X7YrMh4A1R/zaM2ZR3/66SfdeOON6tGjh3r27Cmr1apjx47pgw8+0Pfff69x48bZ9gz37t1bTZo00YgRI/TEE0/I3d1d7777rm3eciW333677ei1P/zhD
9qxY4cWLFigZs2aXXVfw4YN04wZM7RmzZqrvl3WqlWrdP78+RLl/fv319ChQ/Xhhx/q4Ycf1po1a9S1a1cVFhZqz549+vDDD7VixQrb0SXlNWXKFH311Vfq2rWrRo8ercLCQr3xxhtq3769MjIy7Np26tRJK1eu1KuvvqqQkBCFh4fbLmoHF1D5F0wHrs6XX35pPPjgg0ZERIRRr149w9PT02jRooUxduxYIzs7u0T7d99914iKijK8vLyM+vXrG927dzfS0tJs9dd6qxNJpS4jRowwDOP/3xajeKlTp47RtGlTY+DAgcZHH31U4pYXxTGVdSuqtWvXlrh1xpVkZWUZCQkJho+PjyHJNt7i24Bs2bKlxDq//vqr8cADDxgNGzY06tWrZ8TFxRl79uwxwsLC7G5BUdatTkqLPzEx0QgLCyt33Jcq7fYX2dnZRlJSkhEaGmrUqlXLsFqtRs+ePY25c+cahmEY27ZtMzw8PIyxY8farXfx4kWjS5cuRkhIiPHrr7/aysaOHWs0atTIsFgsV7x9WPEtXUpbevbsaWu3Zs0aIy4uzvDz8zO8vb2N5s2bG8OHDze2bt1qt21KuyVOabdwOX78uHHfffcZPj4+hp+fnzF8+HDj66+/NiTZ3WalrPFc7lYnV/veAlD1MI/WnHk0Ly/P+Nvf/mbExcUZjRs3NmrVqmX4+PgYMTExxv/93//Z3frNMH6bM6Ojow1PT0+jSZMmxquvvlrmLcNKu/WaJCMpKcmurLQ5p6w5r6xxX/p858+fNx577DEjODjYqF27ttG1a1cjPT29xHuxeNsuXrz4stupXbt2hpubm/HTTz9dtt2lYypref/99w3D+O0WZH/961+Ndu3a2f5/OnXqZEydOtXIzc219Vfadise96Xfe1atWmVERUUZnp6eRvPmzY23337beOyxxwxvb2+7dnv27DG6detm1K5d25Bk66f4e8Xx48ft2pf2OsM8FsNwkeOFAADltmTJEt11113asGGDunbt6uxwAACoMqKiohQQEKBVq1Y5O5QK6d+/v3bt2mW7awpcH+d0A4CLO3funN3jwsJCzZw5U76+vrrhhhucFBUAAFXP1q1blZGRoWHDhjk7lHK59DvAvn379MUXXyg2NtY5AaFCOKcbqEKOHz9+2QuyeHp6KiAgoBIjKr+qHLuzjR07VufOnVNMTIzy8/P18ccfa+PGjXrxxRfLvC0KAKCkqjwXVeXYXcHOnTu1bds2vfLKKwoODta9997r7JDKpVmzZho+fLiaNWumH3/8UbNnz5anp6eefPJJZ4eGq0DSDVQhXbp0ueytPLp37661a9dWXkBXoSrH7mw9evTQK6+8omXLlun8+fNq0aKFZs6cqTFjxjg7NACoUqryXFSVY3cFH330kVJSUtS6dWt98MEH8vb2dnZI5dKnTx998MEHysrKkpeXl2JiYvTiiy+W6+r9cB2c0w1UIV9//XWJw4x+r379+rYrWLuaqhw7AKB6qMpzUVWOHajpSLoBAAAAADCJUy+kNnv2bEVGRsrX11e+vr6KiYmxu9n7+fPnlZSUpAYNGqhevXoaOHCgsrOz7fo4dOiQEhISVKdOHQUGBuqJJ57QxYsXK3soAAAAAACU4NRzuhs3bqxp06apZcuWMgxD7733nu6880599913ateuncaPH6/PP/9cixcvlp+fn8aMGaMBAwbo66+/lvTbFXwTEhJktVq1ceNGHT16VMOGDVOtWrX04osvljuOoqIiHTlyRD4+PrJYLGYNFwCAq2YYhk6dOqWQkBC5udW8m44wRwMAXFW552in3SG8DPXr1zfefvttIycnx6hVq5bdze1/+OEHQ5KRnp5uGIZhfPHFF4abm5uRlZVlazN79mzD19fXyM/PL/dzHj58+LI3vGdhYWFhYXH2cvjwYcdNtlUIczQLCwsLi6svV5qjXebq5YWFhVq8eLHOnDmjmJgYbdu2TQUFBerVq5etTUREhJo0aaL09HTddNNNSk9PV4cOHRQUFGRrExcXp9GjR2vXrl2Kiooq9bny8/OVn59ve2z877T2w4cPy9fX16QRAgBw9fLy8hQaGiofHx9nh+IUxeNmjgYAuJryztFOT7p37NihmJgYnT9/XvXq1dMnn3yitm3bKiMjQ56envL397drHxQUpKysLElSVlaWXcJdXF9cV5bU1FRNnTq1RHnxueUAALiamnpodfG4maMBAK7qSnO0008Oa926tTIyMrR582aNHj1aiYmJ2r17t6nPmZycrNzcXNty+PBhU58PAAAAAFAzOX1Pt6enp1q0aCFJ6tSpk7Zs2aK//e1vuvfee3XhwgXl5OTY7e3Ozs6W1WqVJFmtVn3zzTd2/RVf3by4TWm8vLzk5eXl4JEAAAAAAGDP6Xu6L1VUVKT8/Hx16tRJtWrV0qpVq2x1e/fu1aFDhxQTEyNJiomJ0Y4dO3Ts2DFbm7S0NPn6+qpt27aVHjsAAAAAAL/n1D3dycnJio+PV5MmTXTq1CktXLhQa9eu1YoVK+Tn56cRI0ZowoQJCggIkK+vr8aOHauYmBjddNNNkqTevXurbdu2Gjp0qKZPn66srCxNnDhRSUlJ7MkGAAAAADidU5PuY8eOadiwYTp69Kj8/PwUGRmpFStW6LbbbpMkvfbaa3Jzc9PAgQOVn5+vuLg4vfnmm7b13d3dtWzZMo0ePVoxMTGqW7euEhMTlZKS4qwhAQAAAABgYzGK75dVg+Xl5cnPz0+5ublcGRUA4FJq+hxV08cPAHBd5Z2jXO6cbgAAAAAAqguSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAKCE1NRUdenSRT4+PgoMDFT//v21d+9euzbnz59XUlKSGjRooHr16mngwIHKzs62a3Po0CElJCSoTp06CgwM1BNPPKGLFy9W5lAAAHAqkm4AAFDCunXrlJSUpE2bNiktLU0FBQXq3bu3zpw5Y2szfvx4ffbZZ1q8eLHWrVunI0eOaMCAAbb6wsJCJSQk6MKFC9q4caPee+89zZ8/X5MmTXLGkAAAcAqLYRiGs4NwtvLe1BwAgMrmKnPU8ePHFRgYqHXr1qlbt27Kzc1Vo0aNtHDhQt19992SpD179qhNmzZKT0/XTTfdpC+//FK33367jhw5oqCgIEnSnDlz9NRTT+n48ePy9PS84vO6yvgBALhUeeco9nQDAIArys3NlSQFBARIkrZt26aCggL16tXL1iYiIkJNmjRRenq6JCk9PV0dOnSwJdySFBcXp7y8PO3atasSowcAwHk8nB0AAABwbUVFRRo3bpy6du2q9u3bS5KysrLk6ekpf39/u7ZBQUHKysqytfl9wl1cX1xXmvz8fOXn59se5+XlOWoYAAA4BUk3YKKmT39uav8HpyWY2j8ASFJSUpJ27typDRs2mP5cqampmjp1qunPA+dx5NzIPAigKuDwcgAAUKYxY8Zo2bJlWrNmjRo3bmwrt1qtunDhgnJycuzaZ2dny2q12tpcejXz4sfFbS6VnJys3Nxc
23L48GEHjgYAgMpH0g0AAEowDENjxozRJ598otWrVys8PNyuvlOnTqpVq5ZWrVplK9u7d68OHTqkmJgYSVJMTIx27NihY8eO2dqkpaXJ19dXbdu2LfV5vby85Ovra7cAAFCVcXg5AAAoISkpSQsXLtTSpUvl4+NjOwfbz89PtWvXlp+fn0aMGKEJEyYoICBAvr6+Gjt2rGJiYnTTTTdJknr37q22bdtq6NChmj59urKysjRx4kQlJSXJy8vLmcMDAKDSkHQDAIASZs+eLUmKjY21K583b56GDx8uSXrttdfk5uamgQMHKj8/X3FxcXrzzTdtbd3d3bVs2TKNHj1aMTExqlu3rhITE5WSklJZwwAAwOlIugEAQAmGYVyxjbe3t2bNmqVZs2aV2SYsLExffPGFI0MDAKBK4ZxuAAAAAABMwp5uAAAAXJbZt8AEgOqMPd0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwiYezAwAAAAAqounTnzu0v4PTEhzaHwBI7OkGAAAAAMA0JN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATMLVy+HSHH1V0ktxlVIAAAAAZmJPNwAAAAAAJiHpBgAAAADAJCTdAAAAAACYhKQbAAAAAACTkHQDAAAAAGASkm4AAAAAAExC0g0AAEpYv369+vXrp5CQEFksFi1ZssSu3mKxlLq89NJLtjZNmzYtUT9t2rRKHgkAAM5F0g0AAEo4c+aMOnbsqFmzZpVaf/ToUbvl3XfflcVi0cCBA+3apaSk2LUbO3ZsZYQPAIDLcGrSnZqaqi5dusjHx0eBgYHq37+/9u7da9cmNja2xK/kDz/8sF2bQ4cOKSEhQXXq1FFgYKCeeOIJXbx4sTKHAgBAtRIfH68XXnhBd911V6n1VqvVblm6dKluvfVWNWvWzK6dj4+PXbu6detWRvgAALgMpybd69atU1JSkjZt2qS0tDQVFBSod+/eOnPmjF27kSNH2v1KPn36dFtdYWGhEhISdOHCBW3cuFHvvfee5s+fr0mTJlX2cAAAqJGys7P1+eefa8SIESXqpk2bpgYNGigqKkovvfQSP4oDAGocD2c++fLly+0ez58/X4GBgdq2bZu6detmK69Tp46sVmupfXz11VfavXu3Vq5cqaCgIF1//fV6/vnn9dRTT2nKlCny9PQ0dQwAANR07733nnx8fDRgwAC78kcffVQ33HCDAgICtHHjRiUnJ+vo0aN69dVXy+wrPz9f+fn5tsd5eXmmxQ0AQGVwqXO6c3NzJUkBAQF25QsWLFDDhg3Vvn17JScn6+zZs7a69PR0dejQQUFBQbayuLg45eXladeuXaU+T35+vvLy8uwWAABQMe+++66GDBkib29vu/IJEyYoNjZWkZGRevjhh/XKK69o5syZdkn1pVJTU+Xn52dbQkNDzQ4fAABTuUzSXVRUpHHjxqlr165q3769rfy+++7TP/7xD61Zs0bJycl6//33df/999vqs7Ky7BJuSbbHWVlZpT4XEzoAAI7x73//W3v37tVDDz10xbbR0dG6ePGiDh48WGab5ORk5ebm2pbDhw87MFoAACqfUw8v/72kpCTt3LlTGzZssCsfNWqU7e8OHTooODhYPXv21P79+9W8efMKPVdycrImTJhge5yXl0fiDQBABbzzzjvq1KmTOnbseMW2GRkZcnNzU2BgYJltvLy85OXl5cgQAQBwKpdIuseMGaNly5Zp/fr1aty48WXbRkdHS5IyMzPVvHlzWa1WffPNN3ZtsrOzJanM88CZ0FGs6dOfOzsEAHBJp0+fVmZmpu3xgQMHlJGRoYCAADVp0kTSbz9aL168WK+88kqJ9dPT07V582bdeuut8vHxUXp6usaPH6/7779f9evXr7RxAADgbE49vNwwDI0ZM0affPKJVq9erfDw8Cuuk5GRIUkKDg6WJMXExGjHjh06duyYrU1aWpp8fX3Vtm1bU+IGAKC627p1q6KiohQVFSXpt/Ozo6Ki7O4OsmjRIhmGocGDB5dY38vLS4sWLVL37t3Vrl07/eUvf9H48eM1d+7cShsDAACuwKl7upOSkrRw4UItXbpUPj4+tnOw/fz8VLt2be3fv18LFy5U37591aBBA23fvl3jx49Xt27dFBkZKUnq3bu32rZtq6FDh2r69OnKysrSxIkTlZSUxN5sAAAqKDY2VoZhXLbNqFGj7E4D+70bbrhBmzZtMiM0AACqFKfu6Z49e7Zyc3MVGxur4OBg2/LPf/5TkuTp6amVK1eqd+/eioiI0GOPPaaBAwfqs88+s/Xh7u6uZcuWyd3dXTExMbr//vs1bNgwpaSkOGtYAAAAAABIcvKe7iv9gh4aGqp169ZdsZ+wsDB98cUXjgoLAAAAAACHcJlbhgEAAAAAUN2QdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwiVOvXg4AAIDfNH36c4f1dXBagsP6AgBcG/Z0AwAAAABgEpJuAAAAAABMQtINAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk5B0AwAAAABgEpJuAAAAAABMQtINAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk5B0AwAAAABgEpJuAAAAAABM4uHsAABUXNOnPze1/4PTEkztHwAAAKju2NMNAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk5B0AwAAAABgEpJuAAAAAABMQtINAAAAAIBJSLoBAAAAADAJSTcAAChh/fr16tevn0JCQmSxWLRkyRK7+uHDh8tisdgtffr0sWtz8uRJDRkyRL6+vvL399eIESN0+vTpShwFAADOR9INAABKOHPmjDp27KhZs2aV2aZPnz46evSobfnggw/s6ocMGaJdu3YpLS1Ny5Yt0/r16zVq1CizQwcAwKV4ODsAAADgeuLj4xUfH3/ZNl5eXrJaraXW/fDDD1q+fLm2bNmizp07S5Jmzpypvn376uWXX1ZISIjDYwYAwBWxpxsAAFTI2rVrFRgYqNatW2v06NE6ceKErS49PV3+/v62hFuSevXqJTc3N23evNkZ4QIA4BTs6QYAAFetT58+GjBggMLDw7V//34988wzio+PV3p6utzd3ZWVlaXAwEC7dTw8PBQQEKCsrKwy+83Pz1d+fr7tcV5enmljAACgMpB0AwCAqzZo0CDb3x06dFBkZKSaN2+utWvXqmfPnhXuNzU1VVOnTnVEiAAAuAQOLwc
AANesWbNmatiwoTIzMyVJVqtVx44ds2tz8eJFnTx5sszzwCUpOTlZubm5tuXw4cOmxg0AgNlIugEAwDX76aefdOLECQUHB0uSYmJilJOTo23bttnarF69WkVFRYqOji6zHy8vL/n6+totAABUZRxeDgAASjh9+rRtr7UkHThwQBkZGQoICFBAQICmTp2qgQMHymq1av/+/XryySfVokULxcXFSZLatGmjPn36aOTIkZozZ44KCgo0ZswYDRo0iCuXAwBqFPZ0AwCAErZu3aqoqChFRUVJkiZMmKCoqChNmjRJ7u7u2r59u+644w61atVKI0aMUKdOnfTvf/9bXl5etj4WLFigiIgI9ezZU3379tXNN9+suXPnOmtIAAA4BXu6AQBACbGxsTIMo8z6FStWXLGPgIAALVy40JFhAQBQ5ZB0AwAAVFDTpz93dggAABfH4eUAAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJjEqUl3amqqunTpIh8fHwUGBqp///7au3evXZvz588rKSlJDRo0UL169TRw4EBlZ2fbtTl06JASEhJUp04dBQYG6oknntDFixcrcygAAAAAAJTg4cwnX7dunZKSktSlSxddvHhRzzzzjHr37q3du3erbt26kqTx48fr888/1+LFi+Xn56cxY8ZowIAB+vrrryVJhYWFSkhIkNVq1caNG3X06FENGzZMtWrV0osvvujM4QEAAKCGavr05w7r6+C0BIf1BaDyOTXpXr58ud3j+fPnKzAwUNu2bVO3bt2Um5urd955RwsXLlSPHj0kSfPmzVObNm20adMm3XTTTfrqq6+0e/durVy5UkFBQbr++uv1/PPP66mnntKUKVPk6enpjKEBAAAAAOBa53Tn5uZKkgICAiRJ27ZtU0FBgXr16mVrExERoSZNmig9PV2SlJ6erg4dOigoKMjWJi4uTnl5edq1a1epz5Ofn6+8vDy7BQAAAAAAR3OZpLuoqEjjxo1T165d1b59e0lSVlaWPD095e/vb9c2KChIWVlZtja/T7iL64vrSpOamio/Pz/bEhoa6uDRAAAAAADgQkl3UlKSdu7cqUWLFpn+XMnJycrNzbUthw8fNv05AQAAAAA1j1PP6S42ZswYLVu2TOvXr1fjxo1t5VarVRcuXFBOTo7d3u7s7GxZrVZbm2+++cauv+Krmxe3uZSXl5e8vLwcPAoAAAAAAOw5dU+3YRgaM2aMPvnkE61evVrh4eF29Z06dVKtWrW0atUqW9nevXt16NAhxcTESJJiYmK0Y8cOHTt2zNYmLS1Nvr6+atu2beUMBAAAAACAUjh1T3dSUpIWLlyopUuXysfHx3YOtp+fn2rXri0/Pz+NGDFCEyZMUEBAgHx9fTV27FjFxMTopptukiT17t1bbdu21dChQzV9+nRlZWVp4sSJSkpKYm82AAAAAMCpnJp0z549W5IUGxtrVz5v3jwNHz5ckvTaa6/Jzc1NAwcOVH5+vuLi4vTmm2/a2rq7u2vZsmUaPXq0YmJiVLduXSUmJiolJaWyhgEAAAAAQKmcmnQbhnHFNt7e3po1a5ZmzZpVZpuwsDB98cUXjgwNAAAAAIBr5jJXLwcAAAAAoLoh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk5B0AwAAAABgEpJuAAAAAABMQtINAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk5B0AwAAAABgEpJuAAAAAABMQtINAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAACWsX79e/fr1U0hIiCwWi5YsWWKrKygo0FNPPaUOHTqobt26CgkJ0bBhw3TkyBG7Ppo2bSqLxWK3TJs2rZJHAgCAc5F0AwCAEs6cOaOOHTtq1qxZJerOnj2rb7/9Vs8995y+/fZbffzxx9q7d6/uuOOOEm1TUlJ09OhR2zJ27NjKCB8AAJfh4ewAAACA64mPj1d8fHypdX5+fkpLS7Mre+ONN3TjjTfq0KFDatKkia3cx8dHVqvV1FgBAHBl7OkGAADXLDc3VxaLRf7+/nbl06ZNU4MGDRQVFaWXXnpJFy9evGw/+fn5ysvLs1sAAKjK2NMNAACuyfnz5/XUU09p8ODB8vX1tZU/+uijuuGGGxQQEKCNGzcqOTlZR48e1auvvlpmX6mpqZo6dWplhA0AQKUg6QYAABVWUFCge+65R4ZhaPbs2XZ1EyZMsP0dGRkpT09P/elPf1Jqaqq8vLxK7S85Odluvby8PIWGhpoTPAAAlYCkG9ek6dOfOzsEAICTFCfcP/74o1avXm23l7s00dHRunjxog4ePKjWrVuX2sbLy6vMhBwAgKqIpBsAAFy14oR73759WrNmjRo0aHDFdTIyMuTm5qbAwMBKiBAAANdA0g0AAEo4ffq0MjMzbY8PHDigjIwMBQQEKDg4WHfffbe+/fZbLVu2TIWFhcrKypIkBQQEyNPTU+np6dq8ebNuvfVW+fj4KD09XePHj9f999+v+vXrO2tYAABUOpJuAABQwtatW3XrrbfaHhefZ52YmKgpU6bo008/lSRdf/31duutWbNGsbGx8vLy0qJFizRlyhTl5+crPDxc48ePtztfGwCAmoCkGwAAlBAbGyvDMMqsv1ydJN1www3atGmTo8MCAKDK4T7dAAAAAACYhKQbAAAAAACTkHQDAAAAAGASkm4AAAAAAExC0g0AAAAAgElIugEAAAAAMAlJNwAAAAAAJiHpBgAAAADAJCTdAAAAAACYhKQbAAAAAACTkHQDAAAAAGASkm4AAAAAAExC0g0AAAAAgEkqlHQ3a9ZMJ06cKFGek5OjZs2aXXNQAAAAAABUBx4VWengwYMqLCwsUZ6fn6+ff/75moMC4BqaPv25qf0fnJZgav8AAACAs11V0v3pp5/a/l6xYoX8/PxsjwsLC7Vq1So1bdrUYcEBAAAAAFCVXVXS3b9/f0mSxWJRYmKiXV2tWrXUtGlTvfLKKw4LDgAAAACAquyqku6ioiJJUnh4uLZs2aKGDRuaEhQAAAAAANVBhc7pPnDggKPjAAAAAFAKR15jheupAJWvQkm3JK1atUqrVq3SsWPHbHvAi7377rvXHBgAAAAAAFVdhZLuqVOnKiUlRZ07d1ZwcLAsFouj4wIAAHA4s+/KAADApSqUdM+ZM0fz58/X0KFDHR0PAAAAAADVhltFVrpw4YL+8Ic/ODoWAAAAAACqlQol3Q899JAWLlzo6FgAAAAAAKhWKnR4+fnz5zV37lytXLlSkZGRqlWrll39q6++6pDgAAAAAACoyiqUdG/fvl3XX3+9JGnnzp12dVxUDQAAAACA31Qo6V6zZo2j4wAAAAAAoNqp0DndAAAAAADgyiqUdN96663q0aNHmUt5rV+/Xv369VNISIgsFouWLFliVz98+HBZLBa7pU+fPnZtTp48qSFDhsjX11f+/v4aMWKETp8+XZ
FhAQAAAADgUBU6vLz4fO5iBQUFysjI0M6dO5WYmFjufs6cOaOOHTvqwQcf1IABA0pt06dPH82bN8/22MvLy65+yJAhOnr0qNLS0lRQUKAHHnhAo0aN4urqAAAAAACnq1DS/dprr5VaPmXKlKvayxwfH6/4+PjLtvHy8pLVai217ocfftDy5cu1ZcsWde7cWZI0c+ZM9e3bVy+//LJCQkLKHQsAAAAAAI7m0HO677//fr377ruO7FJr165VYGCgWrdurdGjR+vEiRO2uvT0dPn7+9sSbknq1auX3NzctHnz5jL7zM/PV15ent0CAAAAAICjOTTpTk9Pl7e3t8P669Onj/7+979r1apV+utf/6p169YpPj5ehYWFkqSsrCwFBgbarePh4aGAgABlZWWV2W9qaqr8/PxsS2hoqMNiBgAAAACgWIUOL7/0/GvDMHT06FFt3bpVzz33nEMCk6RBgwbZ/u7QoYMiIyPVvHlzrV27Vj179qxwv8nJyZowYYLtcV5eHok3AAAAAMDhKpR0+/n52T12c3NT69atlZKSot69ezsksNI0a9ZMDRs2VGZmpnr27Cmr1apjx47Ztbl48aJOnjxZ5nng0m/niV96QTYAAAAAABytQkn3768mXpl++uknnThxQsHBwZKkmJgY5eTkaNu2berUqZMkafXq1SoqKlJ0dLRTYgQAAAAAoFiFku5i27Zt0w8//CBJateunaKioq5q/dOnTyszM9P2+MCBA8rIyFBAQIACAgI0depUDRw4UFarVfv379eTTz6pFi1aKC4uTpLUpk0b9enTRyNHjtScOXNUUFCgMWPGaNCgQVy5HAAAAADgdBW6kNqxY8fUo0cPdenSRY8++qgeffRRderUST179tTx48fL3c/WrVsVFRVlS9YnTJigqKgoTZo0Se7u7tq+fbvuuOMOtWrVSiNGjFCnTp3073//2+7Q8AULFigiIkI9e/ZU3759dfPNN2vu3LkVGRYAAPif9evXq1+/fgoJCZHFYtGSJUvs6g3D0KRJkxQcHKzatWurV69e2rdvn12bkydPasiQIfL19ZW/v79GjBhxVbcWBQCgOqjQnu6xY8fq1KlT2rVrl9q0aSNJ2r17txITE/Xoo4/qgw8+KFc/sbGxMgyjzPoVK1ZcsY+AgAAtXLiwfIEDAIByOXPmjDp27KgHH3ywxAVUJWn69OmaMWOG3nvvPYWHh+u5555TXFycdu/ebbuTyZAhQ3T06FGlpaWpoKBADzzwgEaNGsW8DQCoUSqUdC9fvlwrV660JdyS1LZtW82aNcvUC6kBAIDKER8fr/j4+FLrDMPQ66+/rokTJ+rOO++UJP39739XUFCQlixZokGDBumHH37Q8uXLtWXLFnXu3FmSNHPmTPXt21cvv/wyp4EBTtL06c8d2t/BaQkO7Q+ojip0eHlRUZFq1apVorxWrVoqKiq65qAAAIDrOnDggLKystSrVy9bmZ+fn6Kjo5Weni5JSk9Pl7+/vy3hlqRevXrJzc1NmzdvLrPv/Px85eXl2S0AAFRlFUq6e/TooT//+c86cuSIreznn3/W+PHjr+n+2QAAwPVlZWVJkoKCguzKg4KCbHVZWVkKDAy0q/fw8FBAQICtTWlSU1Pl5+dnW0JDQx0cPQAAlatCSfcbb7yhvLw8NW3aVM2bN1fz5s0VHh6uvLw8zZw509ExAgCAGiI5OVm5ubm25fDhw84OCQCAa1Khc7pDQ0P17bffauXKldqzZ4+k327f9fvDzAAAQPVktVolSdnZ2QoODraVZ2dn6/rrr7e1OXbsmN16Fy9e1MmTJ23rl8bLy8vuLiUAAFR1V7Wne/Xq1Wrbtq3y8vJksVh02223aezYsRo7dqy6dOmidu3a6d///rdZsQIAABcQHh4uq9WqVatW2cry8vK0efNmxcTESJJiYmKUk5Ojbdu22dqsXr1aRUVFio6OrvSYAQBwlqva0/36669r5MiR8vX1LVHn5+enP/3pT3r11Vd1yy23OCxAAABQ+U6fPq3MzEzb4wMHDigjI0MBAQFq0qSJxo0bpxdeeEEtW7a03TIsJCRE/fv3l/TbEXB9+vTRyJEjNWfOHBUUFGjMmDEaNGgQVy4HANQoV7Wn+/vvv1efPn3KrO/du7fdL9oAAKBq2rp1q6KiohQVFSVJmjBhgqKiojRp0iRJ0pNPPqmxY8dq1KhR6tKli06fPq3ly5fb7tEtSQsWLFBERIR69uypvn376uabb9bcuXOdMh4AAJzlqvZ0Z2dnl3qrMFtnHh46fvz4NQcFAACcKzY2VoZhlFlvsViUkpKilJSUMtsEBARo4cKFZoQHAECVcVV7uq+77jrt3LmzzPrt27fbXVAFAAAAAICa7KqS7r59++q5557T+fPnS9SdO3dOkydP1u233+6w4AAAAAAAqMqu6vDyiRMn6uOPP1arVq00ZswYtW7dWpK0Z88ezZo1S4WFhXr22WdNCRQAAAAAgKrmqpLuoKAgbdy4UaNHj1ZycrLtXC+LxaK4uDjNmjVLQUFBpgQKAACA8mn69OfODgEA8D9XlXRLUlhYmL744gv9+uuvyszMlGEYatmyperXr29GfAAAAAAAVFlXnXQXq1+/vrp06eLIWAAAAAAAqFau6kJqAAAAAACg/Ei6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAm8XB2AABqrqZPf25q/wenJZjaPwAAAHAl7OkGAAAAAMAkJN0AAAAAAJiEw8sBAAAAOJ0jTzvjFDO4EvZ0AwAAAABgEpJuAAAAAABMQtINAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk5B0AwCACmnatKksFkuJJSkpSZIUGxtbou7hhx92ctQAAFQuD2cHAAAAqqYtW7aosLDQ9njnzp267bbb9Mc//tFWNnLkSKWkpNge16lTp1JjBADA2Ui6AQBAhTRq1Mju8bRp09S8eXN1797dVlanTh1ZrdbKDg0AAJfB4eUAAOCaXbhwQf/4xz/04IMPymKx2MoXLFighg0bqn379kpOTtbZs2cv209+fr7y8vLsFgAAqjL2dAMAgGu2ZMkS5eTkaPjw4bay++67T2FhYQoJCdH27dv11FNPae/evfr444/L7Cc1NVVTp06thIiBkpo+/bmzQwBQDZF0AwCAa/bOO+8oPj5eISEhtrJRo0bZ/u7QoYOCg4PVs2dP7d+/X82bNy+1n+TkZE2YMMH2OC8vT6GhoeYFDgCAyUi6AQDANfnxxx+1cuXKy+7BlqTo6GhJUmZmZplJt5eXl7y8vBweIwAAzsI53QAA4JrMmzdPgYGBSkhIuGy7jIwMSVJwcHAlRAUAgGtgTzcAAKiwo
qIizZs3T4mJifLw+P9fK/bv36+FCxeqb9++atCggbZv367x48erW7duioyMdGLEAABULpJuAABQYStXrtShQ4f04IMP2pV7enpq5cqVev3113XmzBmFhoZq4MCBmjhxopMiBQDAOZx6ePn69evVr18/hYSEyGKxaMmSJXb1hmFo0qRJCg4OVu3atdWrVy/t27fPrs3Jkyc1ZMgQ+fr6yt/fXyNGjNDp06crcRQAANRcvXv3lmEYatWqlV15aGio1q1bpxMnTuj8+fPat2+fpk+fLl9fXydFCgCAczg16T5z5ow6duyoWbNmlVo/ffp0zZgxQ3PmzNHmzZtVt25dxcXF6fz587Y2Q4YM0a5du5SWlqZly5Zp/fr1dldLBQAAAADAWZx6eHl8fLzi4+NLrTMMQ6+//romTpyoO++8U5L097//XUFBQVqyZIkGDRqkH374QcuXL9eWLVvUuXNnSdLMmTPVt29fvfzyy3a3LQEAAAAAoLK57NXLDxw4oKysLPXq1ctW5ufnp+joaKWnp0uS0tPT5e/vb0u4JalXr15yc3PT5s2bKz1mAAAAAAB+z2UvpJaVlSVJCgoKsisPCgqy1WVlZSkwMNCu3sPDQwEBAbY2pcnPz1d+fr7tcV5enqPCBgAAAADAxmWTbjOlpqZq6tSpzg6jUjR9+nNnhwAAAAAANZbLHl5utVolSdnZ2Xbl2dnZtjqr1apjx47Z1V+8eFEnT560tSlNcnKycnNzbcvhw4cdHD0AAAAAAC6cdIeHh8tqtWrVqlW2sry8PG3evFkxMTGSpJiYGOXk5Gjbtm22NqtXr1ZRUZGio6PL7NvLy0u+vr52CwAAAAAAjubUw8tPnz6tzMxM2+MDBw4oIyNDAQEBatKkicaNG6cXXnhBLVu2VHh4uJ577jmFhISof//+kqQ2bdqoT58+GjlypObMmaOCggKNGTNGgwYN4srlAAAAAACnc2rSvXXrVt166622xxMmTJAkJSYmav78+XryySd15swZjRo1Sjk5Obr55pu1fPlyeXt729ZZsGCBxowZo549e8rNzU0DBw7UjBkzKn0sAAAAAABcyqlJd2xsrAzDKLPeYrEoJSVFKSkpZbYJCAjQwoULzQgPAAAAAIBr4rLndAMAAAAAUNWRdAMAAAAAYBKSbgAAAAAATELSDQAAAACASZx6ITUAAAAAcLSmT3/u0P4OTktwaH+oWdjTDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJN4ODsAADBL06c/N7X/g9MSTO0fcHVTpkzR1KlT7cpat26tPXv2SJLOnz+vxx57TIsWLVJ+fr7i4uL05ptvKigoyBnhAgDgFOzpBgAAFdauXTsdPXrUtmzYsMFWN378eH322WdavHix1q1bpyNHjmjAgAFOjBYAgMrHnm4AAFBhHh4eslqtJcpzc3P1zjvvaOHCherRo4ckad68eWrTpo02bdqkm266qbJDBQDAKdjTDQAAKmzfvn0KCQlRs2bNNGTIEB06dEiStG3bNhUUFKhXr162thEREWrSpInS09PL7C8/P195eXl2CwAAVRlJNwAAqJDo6GjNnz9fy5cv1+zZs3XgwAHdcsstOnXqlLKysuTp6Sl/f3+7dYKCgpSVlVVmn6mpqfLz87MtoaGhJo8CAABzcXg5AACokPj4eNvfkZGRio6OVlhYmD788EPVrl27Qn0mJydrwoQJtsd5eXkk3gCAKo093QAAwCH8/f3VqlUrZWZmymq16sKFC8rJybFrk52dXeo54MW8vLzk6+trtwAAUJWRdAMAAIc4ffq09u/fr+DgYHXq1Em1atXSqlWrbPV79+7VoUOHFBMT48QoAQCoXBxeDgAAKuTxxx9Xv379FBYWpiNHjmjy5Mlyd3fX4MGD5efnpxEjRmjChAkKCAiQr6+vxo4dq5iYGK5cDgCoUUi6AQBAhfz0008aPHiwTpw4oUaNGunmm2/Wpk2b1KhRI0nSa6+9Jjc3Nw0cOFD5+fmKi4vTm2++6eSoAQCoXCTdAACgQhYtWnTZem9vb82aNUuzZs2qpIgAAHA9nNMNAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk5B0AwAAAABgEpJuAAAAAABMQtINAAAAAIBJSLoBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmMTD2QEAAAAAqJqaPv25s0MAXB57ugEAAAAAMAlJNwAAAAAAJiHpBgAAAADAJJzTDQAAAACX4chz1w9OS3BYX6gaXHpP95QpU2SxWOyWiIgIW/358+eVlJSkBg0aqF69eho4cKCys7OdGDEAAAAAAP+fSyfdktSuXTsdPXrUtmzYsMFWN378eH322WdavHix1q1bpyNHjmjAgAFOjBYAAAAAgP/P5Q8v9/DwkNVqLVGem5urd955RwsXLlSPHj0kSfPmzVObNm20adMm3XTTTZUdKgAAAAAAdlx+T/e+ffsUEhKiZs2aaciQITp06JAkadu2bSooKFCvXr1sbSMiItSkSROlp6c7K1wAAAAAAGxcek93dHS05s+fr9atW+vo0aOaOnWqbrnlFu3cuVNZWVny9PSUv7+/3TpBQUHKysq6bL/5+fnKz8+3Pc7LyzMjfAAAAABADefSSXd8fLzt78jISEVHRyssLEwffvihateuXeF+U1NTNXXqVEeECAAAAABAmVz+8PLf8/f3V6tWrZSZmSmr1aoLFy4oJyfHrk12dnap54D/XnJysnJzc23L4cOHTYwaAAAAAFBTVamk+/Tp09q/f7+Cg4PVqVMn1apVS6tWrbLV7927V4cOHVJMTMxl+/Hy8pKvr6/dAgAAAACAo7n04eWPP/64+vXrp7CwMB05ckSTJ0+Wu7u7Bg8eLD8/P40YMUITJkxQQECAfH19NXbsWMXExHDlcgAAAACAS3DppPunn37S4MGDdeLECTVq1Eg333yzNm3apEaNGkmSXnvtNbm5uWngwIHKz89XXFyc3nzzTSdHDQAAAADAb1w66V60aNFl6729vTVr1izNmjWrkiICAAAAAKD8qtQ53QAAAAAAVCUk3QAAAAAAmISkGwAAAAAAk7j0Od0A4MqaPv25aX0fnJZgWt8AAACoPCTdLsDML+4AAAAAAOfh8HIAAAAAAExC0g0AAAAAgElIugEAAAAAMAlJNwAAqJDU1FR16dJFPj4+CgwMVP/+/bV37167NrGxsbJYLHbLww8/7KSIAQCofCTdAACgQtatW6ekpCRt2rRJaWlpKigoUO/evXXmzBm7diNHjtTRo0dty/Tp050UMQAAlY+r
lwMAgApZvny53eP58+crMDBQ27ZtU7du3WzlderUkdVqrezwAABwCSTdAADAIXJzcyVJAQEBduULFizQP/7xD1mtVvXr10/PPfec6tSp44wQAaBacfSthw9OS3Bof/gNSTcAuCBHT6KXYlKFoxUVFWncuHHq2rWr2rdvbyu/7777FBYWppCQEG3fvl1PPfWU9u7dq48//rjUfvLz85Wfn297nJeXZ3rsAACYiaQbAABcs6SkJO3cuVMbNmywKx81apTt7w4dOig4OFg9e/bU/v371bx58xL9pKamaurUqabHCwBAZeFCagAA4JqMGTNGy5Yt05o1a9S4cePLto2OjpYkZWZmllqfnJys3Nxc23L48GGHxwsAQGViTzcAAKgQwzA0duxYffLJJ1q7dq3Cw8OvuE5GRoYkKTg4uNR6Ly8veXl5OTJMAACciqQbAABUSFJSkhYuXKilS5fKx8dHWVlZkiQ/Pz/Vrl1b+/fv18KFC9W3b181aNBA27dv1/jx49WtWzdFRkY6OXoAACoHSTcAAKiQ2bNnS5JiY2PtyufNm6fhw4fL09NTK1eu1Ouvv64zZ84oNDRUAwcO1MSJE50QLQAAzkHSDQAAKsQwjMvWh4aGat26dZUUDQAArokLqQEAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAm4erlAAAAAAA1ffpzh/V1cFqCw/qq6tjTDQAAAACASUi6AQAAAAAwCUk3AAAAAAAm4ZxuAAAAAKgkjjxvGlUDe7oBAAAAADAJSTcAAAAAACYh6QYAAAAAwCQk3QAAAAAAmISkGwAAAAAAk3D1cgCogcy+curBaQmm9g8AAFBVsKcbAAAAAACTkHQDAAAAAGASkm4AAAAAAExC0g0AAAAAgElIugEAAAAAMAlJNwAAAAAAJuGWYeVg9q11AAAAAKA6cWQOVdVvRcqebgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJNwn24AAAAAgMty5D2/pcq/7zd7ugEAAAAAMAlJNwAAAAAAJiHpBgAAAADAJJzTDQBwOEefe1XZKvtcLwAAUH2xpxsAAAAAAJOQdAMAAAAAYJJqk3TPmjVLTZs2lbe3t6Kjo/XNN984OyQAACDmaABAzVYtku5//vOfmjBhgiZPnqxvv/1WHTt2VFxcnI4dO+bs0AAAqNGYowEANZ3FMAzD2UFcq+joaHXp0kVvvPGGJKmoqEihoaEaO3asnn766Suun5eXJz8/P+Xm5srX17dEfVW/IBAAwLVczYXarjRHuTqz5+irxZwOAHDUBVPLO0dV+auXX7hwQdu2bVNycrKtzM3NTb169VJ6eroTIwMAoHRXk/gV5Z81MRJzMUcDAFANku5ffvlFhYWFCgoKsisPCgrSnj17Sl0nPz9f+fn5tse5ubmSfvulojRV+QsPAKBqK56DquKBaZUxR18t5nQAgKPmlOJ+rjRHV/mkuyJSU1M1derUEuWhoaFOiAYAgCs7ceKE/Pz8nB2G6ZijAQBm83vdsf2dOnXqsnN0lU+6GzZsKHd3d2VnZ9uVZ2dny2q1lrpOcnKyJkyYYHtcVFSkkydPqkGDBrJYLHZt8/LyFBoaqsOHD1fJc+nMwDYpHduldGyXktgmpWO7lC43N1dNmjRRQECAs0O5ambP0VerOr/HqvPYpOo9PsZWNTG2qsuR4zMMQ6dOnVJISMhl21X5pNvT01OdOnXSqlWr1L9/f0m/TdCrVq3SmDFjSl3Hy8tLXl5edmX+/v6XfR5fX99q+aa7FmyT0rFdSsd2KYltUjq2S+nc3KreDUcqa46+WtX5PVadxyZV7/ExtqqJsVVdjhpfeY5Cq/JJtyRNmDBBiYmJ6ty5s2688Ua9/vrrOnPmjB544AFnhwYAQI3GHA0AqOmqRdJ977336vjx45o0aZKysrJ0/fXXa/ny5SUu3AIAACoXczQAoKarFkm3JI0ZM6bMQ9WuhZeXlyZPnlziULeajG1SOrZL6dguJbFNSsd2KV112C5mzdFXqzpsy7JU57FJ1Xt8jK1qYmxVlzPGZzGq4j1IAAAAAACoAqreVVkAAAAAAKgiSLoBAAAAADAJSTcAAAAAACYh6ZY0ZcoUWSwWuyUiIsJWf/78eSUlJalBgwaqV6+eBg4cqOzsbCdGbI7169erX79+CgkJkcVi0ZIlS+zqDcPQpEmTFBwcrNq1a6tXr17at2+fXZuTJ09qyJAh8vX1lb+/v0aMGKHTp09X4igc60rbZPjw4SXeO3369LFrU922iSSlpqaqS5cu8vHxUWBgoPr376+9e/fatSnP/82hQ4eUkJCgOnXqKDAwUE888YQuXrxYmUNxmPJsk9jY2BLvl4cfftiuTXXaJpI0e/ZsRUZG2u6FGRMToy+//NJWX9PeJ8WutF1q4nvFkRwxn7kqR8xLrspRc4srctQc4Yoc8TnvyhzxeV0VTJs2TRaLRePGjbOVVfXX7vdKG19lvnYk3f/Trl07HT161LZs2LDBVjd+/Hh99tlnWrx4sdatW6cjR45owIABTozWHGfOnFHHjh01a9asUuunT5+uGTNmaM6cOdq8ebPq1q2ruLg4nT9/3tZmyJAh2rVrl9LS0rRs2TKtX79eo0aNqqwhONyVtokk9enTx+6988EHH9jVV7dtIknr1q1TUlKSNm3apLS0NBUUFKh37946c+aMrc2V/m8KCwuVkJCgCxcuaOPGjXrvvfc0f/58TZo0yRlDumbl2SaSNHLkSLv3y/Tp02111W2bSFLjxo01bdo0bdu2TVu3blWPHj105513ateuXZJq3vuk2JW2i1Tz3iuO5Ij5zFU5Yl5yVY6YW1yVI+YIV3Wtn/Ou7lo/r6uCLVu26K233lJkZKRdeVV/7YqVNT6pEl87A8bkyZONjh07llqXk5Nj1KpVy1i8eLGt7IcffjAkGenp6ZUUYeWTZHzyySe2x0VFRYbVajVeeuklW1lOTo7h5eVlfPDBB4ZhGMbu3bsNScaWLVtsbb788kvDYrEYP//8c6XFbpZLt4lhGEZiYqJx5513lrlOdd8mxY4dO2ZIMtatW2cYRvn+b7744gvDzc3NyMrKsrWZPXu24evra+Tn51fuAExw6TYxDMPo3r278ec//7nMdar7NilWv3594+233+Z9coni7WIYvFccqSLzWVVRkXmpKqnI3FJVVGSOqEqu5nO+Krqaz2tXd+rUKaNly5ZGWlqa3Viqy2tX1vgMo3JfO/Z0/8++ffsUEhKiZs2aaciQITp06JAkadu2bSooKFCvXr1sbSMiItSkSROlp6c7K9xKd+DAAWVlZdltBz8/P0VHR9u2Q3p6uvz9/dW5c2dbm169esnNzU2bN2+u9Jgry9q1axUYGKjWrVtr9OjROnHihK2upmyT3NxcSVJAQICk8v3fpKenq0OHDgoKCrK1iYuLU15ent2vx1XVpduk2IIFC9SwYUO1b99eycnJOnv2rK2uum+TwsJCLVq0SGfOnFF
MTAzvk/+5dLsUq8nvFTOVZz6r6i43L1UlFZlbqoqKzBFVQUU+56uSinxeu7qkpCQlJCTYvUZS9fl/K2t8xSrrtfMwpdcqJjo6WvPnz1fr1q119OhRTZ06Vbfccot27typrKwseXp6yt/f326doKAgZWVlOSdgJyge6++/4BU/Lq7LyspSYGCgXb2Hh4cCAgKq7bbq06ePBgwYoPDwcO3fv1/PPPOM4uPjlZ6eLnd39xqxTYqKijRu3Dh17dpV7du3l6Ry/d9kZWWV+n4qrqvKStsmknTfffcpLCxMISEh2r59u5566int3btXH3/8saTqu0127NihmJgYnT9/XvXq1dMnn3yitm3bKiMjo0a/T8raLlLNfa9UhvLMZ1XZlealqqKic0tVUNE5wpVdy+d8VXAtn9eubNGiRfr222+1ZcuWEnXV4f/tcuOTKve1I+mWFB8fb/s7MjJS0dHRCgsL04cffqjatWs7MTK4ukGDBtn+7tChgyIjI9W8eXOtXbtWPXv2dGJklScpKUk7d+60uw5CTVfWNvn9ufwdOnRQcHCwevbsqf3796t58+aVHWalad26tTIyMpSbm6uPPvpIiYmJWrdunbPDcrqytkvbtm1r7HsF1666zEvVeW6pjnNEdf+cr46f14cPH9af//xnpaWlydvb29nhOFx5xleZrx2Hl5fC399frVq1UmZmpqxWqy5cuKCcnBy7NtnZ2bJarc4J0AmKx3rpFQt/vx2sVquOHTtmV3/x4kWdPHmyxmyrZs2aqWHDhsrMzJRU/bfJmDFjtGzZMq1Zs0aNGze2lZfn/8ZqtZb6fiquq6rK2ialiY6OliS790t13Caenp5q0aKFOnXqpNTUVHXs2FF/+9vfavT7RCp7u5SmprxXKkN55rPq5NJ5qSq4lrnF1V3LHOHKruVzviq4ls9rV7Vt2zYdO3ZMN9xwgzw8POTh4aF169ZpxowZ8vDwUFBQUJV+7a40vsLCwhLrmPnakXSX4vTp09q/f7+Cg4PVqVMn1apVS6tWrbLV7927V4cOHbI7l6O6Cw8Pl9VqtdsOeXl52rx5s207xMTEKCcnR9u2bbO1Wb16tYqKimxv4urup59+0okTJxQcHCyp+m4TwzA0ZswYffLJJ1q9erXCw8Pt6svzfxMTE6MdO3bY/SiRlpYmX19f2yFbVcmVtklpMjIyJMnu/VKdtklZioqKlJ+fXyPfJ5dTvF1KU1PfK2Yoz3xWnVw6L7kyR8wtrsoRc0RVcjWf81XR1Xxeu6qePXtqx44dysjIsC2dO3fWkCFDbH9X5dfuSuMr7XQbU1+7Srlcm4t77LHHjLVr1xoHDhwwvv76a6NXr15Gw4YNjWPHjhmGYRgPP/yw0aRJE2P16tXG1q1bjZiYGCMmJsbJUTveqVOnjO+++8747rvvDEnGq6++anz33XfGjz/+aBiGYUybNs3w9/c3li5damzfvt248847jfDwcOPcuXO2Pvr06WNERUUZmzdvNjZs2GC0bNnSGDx4sLOGdM0ut01OnTplPP7440Z6erpx4MABY+XKlcYNN9xgtGzZ0jh//rytj+q2TQzDMEaPHm34+fkZa9euNY4ePWpbzp49a2tzpf+bixcvGu3btzd69+5tZGRkGMuXLzcaNWpkJCcnO2NI1+xK2yQzM9NISUkxtm7dahw4cMBYunSp0axZM6Nbt262PqrbNjEMw3j66aeNdevWGQcOHDC2b99uPP3004bFYjG++uorwzBq3vuk2OW2S019rziSI+YzV+WIeclVOWJucVWOmCNc1bV+zru6a/28rkouvZp3VX/tLvX78VX2a0fSbRjGvffeawQHBxuenp7GddddZ9x7771GZmamrf7cuXPGI488YtSvX9+oU6eOcddddxlHjx51YsTmWLNmjSGpxJKYmGgYxm+3WXnuueeMoKAgw8vLy+jZs6exd+9euz5OnDhhDB482KhXr57h6+trPPDAA8apU6ecMBrHuNw2OXv2rNG7d2+jUaNGRq1atYywsDBj5MiRdrfwMYzqt00Mwyh1m0gy5s2bZ2tTnv+bgwcPGvHx8Ubt2rWNhg0bGo899phRUFBQyaNxjCttk0OHDhndunUzAgICDC8vL6NFixbGE088YeTm5tr1U522iWEYxoMPPmiEhYUZnp6eRqNGjYyePXvavogZRs17nxS73Hapqe8VR3LEfOaqHDEvuSpHzS2uyFFzhCtyxOe8K3PE53VVcWnSXdVfu0v9fnyV/dpZDMMwHL//HAAAAAAAcE43AAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AJczfPhw9e/f39lhAACASzBHA1ePpBuowZw9cR48eFAWi0UZGRlOiwEAAFfEHA1UHyTdAAAAAACYhKQbQKl27typ+Ph41atXT0FBQRo6dKh++eUXW31sbKweffRRPfnkkwoICJDVatWUKVPs+tizZ49uvvlmeXt7q23btlq5cqUsFouWLFkiSQoPD5ckRUVFyWKxKDY21m79l19+WcHBwWrQoIGSkpJUUFBg5pABAKgSmKOBqoWkG0AJOTk56tGjh6KiorR161YtX75c2dnZuueee+zavffee6pbt642b96s6dOnKyUlRWlpaZKkwsJC9e/fX3Xq1NHmzZs1d+5cPfvss3brf/PNN5KklStX6ujRo/r4449tdWvWrNH+/fu1Zs0avffee5o/f77mz59v7sABAHBxzNFA1ePh7AAAuJ433nhDUVFRevHFF21l7777rkJDQ/Wf//xHrVq1kiRFRkZq8uTJkqSWLVvqjTfe0KpVq3TbbbcpLS1N+/fv19q1a2W1WiVJf/nLX3TbbbfZ+mzUqJEkqUGDBrY2xerXr6833nhD7u7uioiIUEJCglatWqWRI0eaOnYAAFwZczRQ9ZB0Ayjh+++/15o1a1SvXr0Sdfv377eb0H8vODhYx44dkyTt3btXoaGhdhP1jTfeWO4Y2rVrJ3d3d7u+d+zYcVXjAACgumGOBqoekm4AJZw+fVr9+vXTX//61xJ1wcHBtr9r1aplV2exWFRUVOSQGMzsGwCAqoo5Gqh6SLoBlHDDDTfoX//6l5o2bSoPj4p9TLRu3VqHDx9Wdna2goKCJElbtmyxa+Pp6Snpt3PLAADAlTFHA1UPF1IDarjc3FxlZGTYLaNGjdLJkyc1ePBgbdmyRfv379eKFSv0wAMPlHvyve2229S8eXMlJiZq+/bt+vrrrzVx4kRJv/0iLkmBgYGqXbu27SIwubm5po0TAICqhjkaqB5IuoEabu3atYqKirJbnn/+eX399dcqLCxU79691aFDB40bN07+/v5ycyvfx4a7u7uWLFmi06dPq0uXLnrooYdsV0b19vaWJHl4eGjGjBl66623FBISojvvvNO0cQIAUNUwRwPVg8UwDMPZQQCoGb7++mvdfPPNyszMVPPmzZ0dDgAA+B/maMA8JN0ATPPJJ5+oXr16atmypTIzM/XnP/9Z9evX14
YNG5wdGgAANRpzNFB5uJAaANOcOnVKTz31lA4dOqSGDRuqV69eeuWVV5wdFgAANR5zNFB52NMNAAAAAIBJuJAaAAAAAAAmIekGAAAAAMAkJN0AAAAAAJiEpBsAAAAAAJOQdAMAAAAAYBKSbgAAAAAATELSDQAAAACASUi6AQAAAAAwCUk3AAAAAAAm+X95HgNECv3AHAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, axes = plt.subplots(1, 2, figsize=(10, 5) )\n", + "axes[0].hist(NeW_Len_SciTLDR_train_Text, bins = 20 )\n", + "axes[0].set_title(\"SciTLDR_train_Text Length\")\n", + "axes[0].set_xlabel(\"Length\")\n", + "axes[0].set_ylabel(\"Count\")\n", + "axes[0].set_xlim((45 ,350))\n", + "\n", + "\n", + "axes[1].hist(NeW_Len_SciTLDR_train_Summary, bins = 20 )\n", + "axes[1].set_title(\"SciTLDR_train_Summary Length\")\n", + "axes[1].set_xlabel(\"Length\")\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2db5df1f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "a4b0cd74", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b7262eb", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 565, + "id": "3ae2e14c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA9wAAAHqCAYAAAD27EaEAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABOQElEQVR4nO3dd3xUVf7/8fekR1LoKbSEIqEqArIBFYRAZBFhQREX2YAIiHRXEFapSt0VkSIIq6AriKKCuAoIEViF0KVZaILwFRJASEKRAMn5/cEvo0MSSMLcTCZ5PR+PeTyYc8/c+dyTIWfeuc1mjDECAAAAAABO5eHqAgAAAAAAKIoI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcKJZatGihFi1auLoMwOlatGihunXruroMAG6MORIoXI4ePSqbzaZ//etfri4F+UDghtvYu3evHn30UVWpUkV+fn6qUKGCWrdurZkzZ97Welu0aCGbzXbLx9ixYyVJERERevjhh2+6zh49eji8NiAgQFWrVtWjjz6qjz/+WBkZGbesw9/fX/Xr19f06dOz7X8rb7zxhhYuXJjn1+XF999/r7Fjx+ro0aN5el1uxttms2n9+vVOqfPEiRMaO3asdu3alav+CxculM1m0/bt253y/s6W1+0BUPQxR+ZNYZ4jM1n1My3OCvsfpb/44gv7/yUUHV6uLgDIjU2bNunBBx9U5cqV1bt3b4WGhur48ePavHmzXn/9dQ0cODBP6/vyyy/t/37xxRf19NNP259v27ZNM2bM0D/+8Q/VqlXL3l6/fv08vYevr6/+/e9/S5J+++03/fzzz/rss8/06KOPqkWLFvr0008VFBTk8JqKFStq0qRJkqQzZ85o8eLFGjp0qE6fPq0JEybk6f3feOMNlS1bVj169MjT6/Li+++/17hx49SiRQtFRETk+nX/+c9/HJ6/++67WrNmTZb2P47/7Thx4oTGjRuniIgI3X333U5ZpysVte0BcHuYI4vWHCk5/2cK9/DFF19o9uzZhO4ihsANtzBhwgQFBwdr27ZtKlmypMOyU6dO5Xl9Pj4+9n+3bt3aYZmfn59mzJih1q1b39YhdV5eXnryyScd2l555RVNnjxZI0eOVO/evfXBBx84LA8ODnZ4zTPPPKOoqCjNnDlT48ePl6enZ77rKUxuHJfNmzdrzZo1WdoBALfGHFm05kjJ+T9Td2eM0eXLl+Xv7+/qUoA845ByuIXDhw+rTp06WSYdSSpfvnyWtvfee0/33nuv7rjjDpUqVUoPPPCAw1/sXXl+2ogRI9SmTRstXbpUBw4cuGlfPz8/NW7cWOfPn8/TBBsREaHvvvtOGzZssB9+98ftTU5O1pAhQ1SpUiX5+vqqevXqmjJlSpbD8pYsWaKGDRsqMDBQQUFBqlevnl5//XVJ1w+7fuyxxyRJDz74oNMPA8/IyND06dNVp04d+fn5KSQkRH379tW5c+fsfcaMGSMPDw/Fx8c7vLZPnz7y8fHR7t27tX79ejVu3FiS1LNnT3udzjiU8JdfftFTTz2lkJAQ+fr6qk6dOnr77bcd+qxfv142m00ffvihJkyYoIoVK8rPz0+tWrXSoUOHsqxz9uzZqlq1qvz9/XXvvffq66+/dvi85nZ7vv/+ez344IO64447VKFCBU2dOvW2txdA4cQcWfTmyNz+TDPP7c1uTvvjof6SNHbsWNlsNh04cEBPPvmkgoODVa5cOY0aNUrGGB0/flwdOnRQUFCQQkND9eqrrzqs74/z2bhx41ShQgUFBgbq0UcfVUpKitLS0jRkyBCVL19eAQEB6tmzp9LS0hzWsWDBArVs2VLly5eXr6+vateurTlz5mSpPfPUhNWrV6tRo0by9/fXm2++qebNm+uuu+7Kdsxq1qyp2NjYm4xq7q1cuVL333+/SpQoocDAQLVr107fffedQ58ePXooICBAv/zyizp27KiAgACVK1dOzz//vNLT0x36/vrrr+revbuCgoJUsmRJxcXFaffu3Q4/ux49emj27NmSHE+9u9G8efNUrVo1+fr6qnHjxtq2bZtTthnWYQ833EKVKlWUkJCgffv23fLcm3Hjxmns2LFq2rSpxo8fLx8fH23ZskVfffWV2rRpU0AV31z37t315Zdfas2aNbrzzjtv2jdzMs1u0s3J9OnTNXDgQAUEBOjFF1+UJIWEhEiSLl26pObNm+uXX35R3759VblyZW3atEkjR47UyZMnNX36dEnSmjVr9MQTT6hVq1aaMmWKJOmHH37Qxo0bNXjwYD3wwAMaNGhQlkMLnXUYeN++fbVw4UL17NlTgwYN0pEjRzRr1ix9++232rhxo7y9vfXSSy/ps88+U69evbR3714FBgZq9erVmj9/vl5++WXdddddSkpK0vjx4zV6
9Gj16dNH999/vySpadOmt1VfUlKS/vSnP8lms2nAgAEqV66cVq5cqV69eik1NVVDhgxx6D958mR5eHjo+eefV0pKiqZOnapu3bppy5Yt9j5z5szRgAEDdP/992vo0KE6evSoOnbsqFKlSqlixYqSro/vrbbn3Llzeuihh9SpUyd16dJFH330kV544QXVq1dPbdu2va3tBlD4MEcWvTkyLz/TvHr88cdVq1YtTZ48WZ9//rleeeUVlS5dWm+++aZatmypKVOmaNGiRXr++efVuHFjPfDAAw6vnzRpkvz9/TVixAgdOnRIM2fOlLe3tzw8PHTu3DmNHTtWmzdv1sKFCxUZGanRo0fbXztnzhzVqVNHjzzyiLy8vPTZZ5/p2WefVUZGhvr37+/wPvv379cTTzyhvn37qnfv3qpZs6YCAgLUu3fvLOOybds2HThwQC+99NJtj89//vMfxcXFKTY2VlOmTNGlS5c0Z84c3Xffffr2228dTg9IT09XbGysmjRpon/9619au3atXn31VVWrVk39+vWTdH0HQvv27bV161b169dPUVFR+vTTTxUXF+fwvn379tWJEyeyPcUu0+LFi3X+/Hn17dtXNptNU6dOVadOnfTTTz/J29v7trcdFjGAG/jyyy+Np6en8fT0NNHR0Wb48OFm9erV5sqVKw79Dh48aDw8PMxf/vIXk56e7rAsIyPD/u/mzZub5s2bZ/teS5cuNZLMunXrsl1epUoV065du5vWGxcXZ0qUKJHj8m+//dZIMkOHDnWoKSoqypw+fdqcPn3a/Pjjj2bYsGFG0i3fLzt16tTJdhtffvllU6JECXPgwAGH9hEjRhhPT09z7NgxY4wxgwcPNkFBQebatWs5vsetxiq3+vfvb/746+jrr782ksyiRYsc+q1atSpL+969e42Pj495+umnzblz50yFChVMo0aNzNWrV+19tm3bZiSZBQsW5KqeBQsWGElm27ZtOfbp1auXCQsLM2fOnHFo79q1qwkODjaXLl0yxhizbt06I8nUqlXLpKWl2fu9/vrrRpLZu3evMcaYtLQ0U6ZMGdO4cWOH2hcuXGgkOfwsb7Y9zZs3N5LMu+++a29LS0szoaGhpnPnzrnafgDuhTmy6M2Ruf2ZHjlyJMf5QJIZM2aM/fmYMWOMJNOnTx9727Vr10zFihWNzWYzkydPtrefO3fO+Pv7m7i4OHtb5nxWt25dhzqeeOIJY7PZTNu2bR3ePzo62lSpUsWhLXNu/KPY2FhTtWpVh7YqVaoYSWbVqlUO7cnJycbPz8+88MILDu2DBg0yJUqUMBcuXMiy/j9q3ry5qVOnTo7Lz58/b0qWLGl69+7t0J6YmGiCg4Md2uPi4owkM378eIe+DRo0MA0bNrQ///jjj40kM336dHtbenq6admyZZaf3Y3fhzJl/pzLlCljzp49a2//9NNPjSTz2Wef3XS74VocUg630Lp1ayUkJOiRRx7R7t27NXXqVMXGxqpChQpasWKFvd/y5cuVkZGh0aNHy8PD8eOd3WE5rhIQECBJOn/+vEP7jz/+qHLlyqlcuXKKiorSP//5Tz3yyCNOvZLq0qVLdf/996tUqVI6c+aM/RETE6P09HT973//kySVLFlSFy9e1Jo1a5z23nmpMTg4WK1bt3aosWHDhgoICNC6devsfevWratx48bp3//+t2JjY3XmzBm988478vKy7gAeY4w+/vhjtW/fXsYYhxpjY2OVkpKinTt3OrymZ8+eDudFZu6Z/umnnyRJ27dv16+//qrevXs71N6tWzeVKlUqT/UFBAQ4nOfo4+Oje++91/5eAIoW5siFTnvvwjJH5vZnmh9/vAiep6enGjVqJGOMevXqZW8vWbKkatasme288be//c1hb2qTJk1kjNFTTz3l0K9JkyY6fvy4rl27Zm/74znYKSkpOnPmjJo3b66ffvpJKSkpDq+PjIzMcoh4cHCwOnTooPfff1/GGEnX9zJ/8MEH6tixo0qUKJGXochizZo1Sk5O1hNPPOHw8/f09FSTJk0cvn9keuaZZxye33///Q7jtmrVKnl7e6t37972Ng8Pjyx79HPj8ccfd/hOcON3CRROHFIOt9G4cWN98sknunLlinbv3q1ly5bptdde06OPPqpdu3apdu3aOnz4sDw8PFS7dm1Xl3tTFy5ckCQFBgY6tEdERGj+/PnKyMjQ4cOHNWHCBJ0+fVp+fn5Oe++DBw9qz549KleuXLbLM8+De/bZZ/Xhhx+qbdu2qlChgtq0aaMuXbrooYceclotN6sxJSUl23MP/1hjpmHDhmnJkiXaunWrJk6caPnP//Tp00pOTta8efM0b968XNVYuXJlh+eZE2bmOek///yzJKl69eoO/by8vPJ8dduKFStm+fJcqlQp7dmzJ0/rAeA+mCOdozDNkbn5mebHjfNRcHCw/Pz8VLZs2Sztv/76a65eL0mVKlXK0p6RkaGUlBSVKVNGkrRx40aNGTNGCQkJunTpkkP/lJQU+7qk64E7O3/729/0wQcf6Ouvv9YDDzygtWvXKikpSd27d7/ZZufKwYMHJUktW7bMdvmNV8738/PL8lkpVaqUw/Vmfv75Z4WFhemOO+5w6HfjfJ8bt/ougcKJwA234+Pjo8aNG6tx48a688471bNnTy1dulRjxoxxdWm5tm/fPklZf9mWKFFCMTEx9ufNmjXTPffco3/84x+aMWOGU947IyNDrVu31vDhw7Ndnnm+XPny5bVr1y6tXr1aK1eu1MqVK7VgwQL97W9/0zvvvOOUWm5WY/ny5bVo0aJsl984uf3000/2SXLv3r2W1pZZn3T9aus3noOV6cZb5OR09dzMv9A7U0G+F4DChTny9hTGOfJmP9Ocjky48aJdf5TdHJGXeSOnvrdax+HDh9WqVStFRUVp2rRpqlSpknx8fPTFF1/otddey3JRupyuSB4bG6uQkBC99957euCBB/Tee+8pNDTU4bORX5k1/Oc//1FoaGiW5TcePVfQV8ZnfndPBG64tUaNGkmSTp48KUmqVq2aMjIy9P333xfq+xP/5z//kc1my3K7lRvVr19fTz75pN588009//zzWf6yeTM5TcLVqlXThQsXcjUx+fj4qH379mrfvr0yMjL07LPP6s0339SoUaNUvXp1yw5BrFatmtauXatmzZrd8hYgGRkZ6tGjh4KCgjRkyBBNnDhRjz76qDp16mTv4+w6y5Urp8DAQKWnpztlgpeuXyBHkg4dOqQHH3zQ3n7t2jUdPXrUIcAXpkM/ARRezJE5c9c58safaeYezuTkZId+mUdNFSafffaZ0tLStGLFCoefVXaHad+Mp6en/vrXv2rhwoWaMmWKli9frt69ezsl/FarVk3S9T+oOHN+X7dunS5duuSwlzu7O5UwvxdNnMMNt7Bu3bps/3r3xRdfSLp+KwhJ6tixozw8PDR+/PgsfyktLH/9mzx5sr788ks9/vjjqlGjxi37Dx8+XFevXtW0adPy9D4lSpTIMgFLUpcuXZSQkKDVq1dnWZacnGw/1+rGw8g8PDzsoS/
zNh+Z50pl9z63o0uXLkpPT9fLL7+cZdm1a9cc3m/atGnatGmT5s2bp5dffllNmzZVv379dObMGXsfZ9fp6empzp076+OPP7bvifmj06dP53mdjRo1UpkyZTR//nyH890WLVqU5VAxq8YdgHtijix6c2Ruf6ZBQUEqW7as/dzyTG+88Uae39NqmYH4j9uVkpKiBQsW5Hld3bt317lz59S3b19duHAhyz3d8ys2NlZBQUGaOHGirl69mmV5fub32NhYXb16VfPnz7e3ZWRk2G8B9kfM70UTe7jhFgYOHKhLly7pL3/5i6KionTlyhVt2rRJH3zwgSIiItSzZ09J1w8/e/HFF/Xyyy/r/vvvV6dOneTr66tt27YpPDxckyZNcko9hw4d0iuvvJKlvUGDBmrXrp2k68HwvffekyRdvnxZP//8s1asWKE9e/bowQcfzPHc3xvVrl1bf/7zn/Xvf/9bo0aNsp8HdSsNGzbUnDlz9Morr6h69eoqX768WrZsqWHDhmnFihV6+OGH1aNHDzVs2FAXL17U3r179dFHH+no0aMqW7asnn76aZ09e1YtW7ZUxYoV9fPPP2vmzJm6++677bc1ufvuu+Xp6akpU6YoJSVFvr6+9vtr3o7mzZurb9++mjRpknbt2qU2bdrI29tbBw8e1NKlS/X666/r0Ucf1Q8//KBRo0apR48eat++vaTr9z69++677efXSdf/Yl2yZEnNnTtXgYGBKlGihJo0aZLj+WGZ3n77ba1atSpL++DBgzV58mStW7dOTZo0Ue/evVW7dm2dPXtWO3fu1Nq1a3X27Nk8bbOPj4/Gjh2rgQMHqmXLlurSpYuOHj2qhQsXqlq1ag5/9c7v9gAompgji94cmdufqXT9ImiTJ0/W008/rUaNGul///vfLe9h7gpt2rSxHxWQGZTnz5+v8uXL2/fY51aDBg1Ut25dLV26VLVq1dI999yT69eePn06289nZGSkunXrpjlz5qh79+6655571LVrV5UrV07Hjh3T559/rmbNmmnWrFl5qrVjx46699579fe//12HDh1SVFSUVqxYYf+e8Mf5vWHDhpKkQYMGKTY2Vp6enuratWue3g+FUMFfGB3Iu5UrV5qnnnrKREVFmYCAAOPj42OqV69uBg4caJKSkrL0f/vtt02DBg2Mr6+vKVWqlGnevLlZs2aNffnt3vJEUraPXr16GWN+v1VE5uOOO+4wERERpnPnzuajjz7KcjuWzJpyulXF+vXrs9ze41YSExNNu3btTGBgYJbbSp0/f96MHDnSVK9e3fj4+JiyZcuapk2bmn/961/2W3189NFHpk2bNqZ8+fLGx8fHVK5c2fTt29ecPHnS4X3mz59vqlatajw9PfN9+5OcboMxb94807BhQ+Pv728CAwNNvXr1zPDhw82JEyfMtWvXTOPGjU3FihVNcnKyw+syb7n1wQcf2Ns+/fRTU7t2bePl5XXLW4Rl3hYsp8fx48eNMcYkJSWZ/v37m0qVKhlvb28TGhpqWrVqZebNm2dfV+ZtVJYuXerwHjndymXGjBmmSpUqxtfX19x7771m48aNpmHDhuahhx5y6JfT9uT0OYqLi8tyexYARQNzZNGbI/PyM7106ZLp1auXCQ4ONoGBgaZLly7m1KlTOd4W7PTp0w6vz+k2bTeOeU7zWU630szu/VasWGHq169v/Pz8TEREhJkyZYp5++23jSRz5MgRe7/c3F5u6tSpRpKZOHHiTfvduE05fT5btWrlsK2xsbEmODjY+Pn5mWrVqpkePXqY7du32/vkNG6Z2/1Hp0+fNn/9619NYGCgCQ4ONj169DAbN240ksySJUvs/a5du2YGDhxoypUrZ2w2m309md8Z/vnPf2Z5v7x+9lHwbMYUkmOIAABZZGRkqFy5curUqZPD4WgAABRnr7/+uoYOHaqjR4/m6fz9wmL58uX6y1/+om+++UbNmjVzdTmwEOdwA0Ahcfny5Szn7L377rs6e/asWrRo4ZqiAAAoZIwxeuutt9S8eXO3CNu//fabw/P09HTNnDlTQUFBeTocHu6Jc7gBN3P69Omb3u7Dx8dHpUuXLsCKHF24cMF+D9WclCtXrsBvpeEONm/erKFDh+qxxx5TmTJltHPnTr311luqW7euHnvsMVeXBwCFHnNk0Xbx4kWtWLFC69at0969e/Xpp5+6uqRcGThwoH777TdFR0crLS1Nn3zyiTZt2qSJEyfe8m4scH8cUg64mYiIiJve7qN58+Zav359wRV0g7Fjx2rcuHE37XPkyBFFREQUTEFu5OjRoxo0aJC2bt2qs2fPqnTp0vrzn/+syZMn3/aF6ACgOGCOLNqOHj2qyMhIlSxZUs8++6wmTJjg6pJyZfHixXr11Vd16NAhXb58WdWrV1e/fv00YMAAV5eGAkDgBtzMxo0bsxya9EelSpWyX+XSFX766Sf99NNPN+1z3333yc/Pr4AqAgAUF8yRAAobAjcAAAAAABbgomkAAAAAAFigyF80LSMjQydOnFBgYKDDjeUBAHBHxhidP39e4eHh8vAovH83Z/4FABQl+Z1/i3zgPnHihCpVquTqMgAAcKrjx4+rYsWKri4jR8y/AICiKK/zb5EP3IGBgZKuD0xQUJCLqwEA4PakpqaqUqVK9vmtsGL+BQAUJfmdf4t84M48jC0oKIgJHwBQZBT2w7SZfwEARVFe59/Ce/IXAAAAAABujMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWMDL1QUARVnEiM+dur6jk9s5dX0AABQGzJcAiir2cAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAGXBu709HSNGjVKkZGR8vf3V7Vq1fTyyy/LGGPvY4zR6NGjFRYWJn9/f8XExOjgwYMurBoAAAAAgFtzaeCeMmWK5syZo1mzZumHH37QlClTNHXqVM2cOdPeZ+rUqZoxY4bmzp2rLVu2qESJEoqNjdXly5ddWDkAAAAAADfn5co337Rpkzp06KB27dpJkiIiIvT+++9r69atkq7v3Z4+fbpeeukldejQQZL07rvvKiQkRMuXL1fXrl1dVjsAAAAAADfj0j3cTZs2VXx8vA4cOCBJ2r17t7755hu1bdtWknTkyBElJiYqJibG/prg4GA1adJECQkJLqkZAA
AAAIDccOke7hEjRig1NVVRUVHy9PRUenq6JkyYoG7dukmSEhMTJUkhISEOrwsJCbEvu1FaWprS0tLsz1NTUy2qHgAAAACAnLk0cH/44YdatGiRFi9erDp16mjXrl0aMmSIwsPDFRcXl691Tpo0SePGjXNypSguIkZ87uoSAAAAABQRLj2kfNiwYRoxYoS6du2qevXqqXv37ho6dKgmTZokSQoNDZUkJSUlObwuKSnJvuxGI0eOVEpKiv1x/PhxazcCAAAAAIBsuDRwX7p0SR4ejiV4enoqIyNDkhQZGanQ0FDFx8fbl6empmrLli2Kjo7Odp2+vr4KCgpyeAAAAAAAUNBcekh5+/btNWHCBFWuXFl16tTRt99+q2nTpumpp56SJNlsNg0ZMkSvvPKKatSoocjISI0aNUrh4eHq2LGjK0sHAAAAAOCmXBq4Z86cqVGjRunZZ5/VqVOnFB4err59+2r06NH2PsOHD9fFixfVp08fJScn67777tOqVavk5+fnwsoBAAAAALg5lwbuwMBATZ8+XdOnT8+xj81m0/jx4zV+/PiCKwwAAAAAgNvk0nO4AQAAAAAoqgjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAABQz6enpGjVqlCIjI+Xv769q1arp5ZdfljHG3scYo9GjRyssLEz+/v6KiYnRwYMHXVg1AADuh8ANAEAxM2XKFM2ZM0ezZs3SDz/8oClTpmjq1KmaOXOmvc/UqVM1Y8YMzZ07V1u2bFGJEiUUGxury5cvu7ByAADci5erCwAAAAVr06ZN6tChg9q1aydJioiI0Pvvv6+tW7dKur53e/r06XrppZfUoUMHSdK7776rkJAQLV++XF27dnVZ7QAAuBP2cAMAUMw0bdpU8fHxOnDggCRp9+7d+uabb9S2bVtJ0pEjR5SYmKiYmBj7a4KDg9WkSRMlJCS4pGYAANwRe7gBAChmRowYodTUVEVFRcnT01Pp6emaMGGCunXrJklKTEyUJIWEhDi8LiQkxL7sRmlpaUpLS7M/T01Ntah6AADcB4Ebbi1ixOeuLgEA3M6HH36oRYsWafHixapTp4527dqlIUOGKDw8XHFxcfla56RJkzRu3DgnVwoAgHvjkHIAAIqZYcOGacSIEeratavq1aun7t27a+jQoZo0aZIkKTQ0VJKUlJTk8LqkpCT7shuNHDlSKSkp9sfx48et3QgAANwAgRsAgGLm0qVL8vBw/Arg6empjIwMSVJkZKRCQ0MVHx9vX56amqotW7YoOjo623X6+voqKCjI4QEAQHHHIeUAABQz7du314QJE1S5cmXVqVNH3377raZNm6annnpKkmSz2TRkyBC98sorqlGjhiIjIzVq1CiFh4erY8eOri0eAAA3QuAGAKCYmTlzpkaNGqVnn31Wp06dUnh4uPr27avRo0fb+wwfPlwXL15Unz59lJycrPvuu0+rVq2Sn5+fCysHAMC92IwxxtVFWCk1NVXBwcFKSUnh8LYiiIum3Z6jk9u5ugQAeeQu85q71InCwdnzOfMbAGfL77zGOdwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYwMvVBQAAAADOFDHic6eu7+jkdk5dH4Digz3cAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAVcHrh/+eUXPfnkkypTpoz8/f1Vr149bd++3b7cGKPRo0crLCxM/v7+iomJ0cGDB11YMQAAAAAAt+bSwH3u3Dk1a9ZM3t7eWrlypb7//nu9+uqrKlWqlL3P1KlTNWPGDM2dO1dbtmxRiRIlFBsbq8uXL7uwcgAAAAAAbs6ltwWbMmWKKlWqpAULFtjbIiMj7f82xmj69Ol66aWX1KFDB0nSu+++q5CQEC1fvlxdu3Yt8JoBAAAAAMgNl+7hXrFihRo1aqTHHntM5cuXV4MGDTR//nz78iNHjigxMVExMTH2tuDgYDVp0kQJCQnZrjMtLU2pqakODwAAAAAACppLA/dPP/2kOXPmqEaNGlq9erX69eunQYMG6Z133pEkJSYmSpJCQkIcXhcSEmJfdqNJkyYpODjY/qhUqZK1GwEAAAAAQDZcGrgzMjJ0zz33aOLEiWrQoIH69Omj3r17a+7cufle58iRI5WSkmJ/HD9+3IkVAwAAAACQOy4N3GFhYapdu7ZDW61atXTs2DFJUmhoqCQpKSnJoU9SUpJ92Y18fX0VFBTk8AAAAAAAoKC5NHA3a9ZM+/fvd2g7cOCAqlSpIun6BdRCQ0MVHx9vX56amqotW7YoOjq6QGsFAAAAACAvXHqV8qFDh6pp06aaOHGiunTpoq1bt2revHmaN2+eJMlms2nIkCF65ZVXVKNGDUVGRmrUqFEKDw9Xx44dXVk6AAAAiomIEZ87dX1HJ7dz6voAFF4uDdyNGzfWsmXLNHLkSI0fP16RkZGaPn26unXrZu8zfPhwXbx4UX369FFycrLuu+8+rVq1Sn5+fi6sHAAAAACAm3Np4Jakhx9+WA8//HCOy202m8aPH6/x48cXYFUAAAAAANwel57DDQAAAABAUUXgBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACzg8quUAwAAAMUJ9/UGig/2cAMAAAAAYAECNwAAAAAAFiBwAwBQDP3yyy968sknVaZMGfn7+6tevXravn27fbkxRqNHj1ZYWJj8/f0VExOjgwcPurBiAADcD4EbAIBi5ty5c2rWrJm8vb21cuVKff/993r11VdVqlQpe5+pU6dqxowZmjt3rrZs2aISJUooNjZWly9fdmHlAAC4Fy6aBgBAMTNlyhRVqlRJCxYssLdFRkba/22M0fTp0/XSSy+pQ4cOkqR3331XISEhWr58ubp27VrgNQMA4I7Yww0AQDGzYsUKNWrUSI899pjKly+vBg0aaP78+
fblR44cUWJiomJiYuxtwcHBatKkiRISElxRMgAAbonADQBAMfPTTz9pzpw5qlGjhlavXq1+/fpp0KBBeueddyRJiYmJkqSQkBCH14WEhNiX3SgtLU2pqakODwAAijsOKQcAoJjJyMhQo0aNNHHiRElSgwYNtG/fPs2dO1dxcXH5WuekSZM0btw4Z5YJAIDbYw83AADFTFhYmGrXru3QVqtWLR07dkySFBoaKklKSkpy6JOUlGRfdqORI0cqJSXF/jh+/LgFlQMA4F4I3AAAFDPNmjXT/v37HdoOHDigKlWqSLp+AbXQ0FDFx8fbl6empmrLli2Kjo7Odp2+vr4KCgpyeAAAUNxxSDkAAMXM0KFD1bRpU02cOFFdunTR1q1bNW/ePM2bN0+SZLPZNGTIEL3yyiuqUaOGIiMjNWrUKIWHh6tjx46uLR4AADdC4AYAoJhp3Lixli1bppEjR2r8+PGKjIzU9OnT1a1bN3uf4cOH6+LFi+rTp4+Sk5N13333adWqVfLz83Nh5QAAuBcCNwAAxdDDDz+shx9+OMflNptN48eP1/jx4wuwKgAAihbO4QYAAAAAwAIEbgAAAAAALEDgBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACxA4AYAAAAAwAIEbgAAAAAALEDgBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACxA4AYAAAAAwAIEbgAAAAAALEDgBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACzg5eoCALhOxIjPnbq+o5PbOXV9AAAAgDvL1x7uqlWr6tdff83SnpycrKpVq952UQAAAAAAuLt8Be6jR48qPT09S3taWpp++eWX2y4KAAAAAAB3l6dDylesWGH/9+rVqxUcHGx/np6ervj4eEVERDitOAAAAAAA3FWeAnfHjh0lSTabTXFxcQ7LvL29FRERoVdffdVpxQEAAAAA4K7yFLgzMjIkSZGRkdq2bZvKli1rSVEAAAAAALi7fF2l/MiRI86uAwAAAACAIiXftwWLj49XfHy8Tp06Zd/znentt9++7cIAAAAAAHBn+Qrc48aN0/jx49WoUSOFhYXJZrM5uy4AAAAAANxavgL33LlztXDhQnXv3t3Z9QAAAADIg4gRnzt1fUcnt3Pq+oDiLF/34b5y5YqaNm3q7FoAAAAAACgy8hW4n376aS1evNjZtQAAAAAAUGTk65Dyy5cva968eVq7dq3q168vb29vh+XTpk1zSnEAAAAAALirfAXuPXv26O6775Yk7du3z2EZF1ADAAAAACCfgXvdunXOrgNAEcBFWwAAAIDf5escbgAAAAAAcHP52sP94IMP3vTQ8a+++irfBQEAAAAAUBTkK3Bnnr+d6erVq9q1a5f27dunuLg4Z9QFAAAAAIBby1fgfu2117JtHzt2rC5cuHBbBQEAAAAAUBQ49RzuJ598Um+//bYzVwkAAAAAgFtyauBOSEiQn5+fM1cJAAAAAIBbytch5Z06dXJ4bozRyZMntX37do0aNcophQEAAAAA4M7yFbiDg4Mdnnt4eKhmzZoaP3682rRp45TCAAAAAABwZ/kK3AsWLHB2HQAAAAAAFCn5CtyZduzYoR9++EGSVKdOHTVo0MApRQEAAAAA4O7yFbhPnTqlrl27av369SpZsqQkKTk5WQ8++KCWLFmicuXKObNGAAAAAADcTr6uUj5w4ECdP39e3333nc6ePauzZ89q3759Sk1N1aBBg5xdIwAAAAAAbidfe7hXrVqltWvXqlatWva22rVra/bs2Vw0DQAAAAAA5XMPd0ZGhry9vbO0e3t7KyMj47aLAgAAAADA3eUrcLds2VKDBw/WiRMn7G2//PKLhg4dqlatWjmtOAAAAAAA3FW+AvesWbOUmpqqiIgIVatWTdWqVVNkZKRSU1M1c+ZMZ9cIAAAAAIDbydc53JUqVdLOnTu1du1a/fjjj5KkWrVqKSYmxqnFAQAAAADgrvK0h/urr75S7dq1lZqaKpvNptatW2vgwIEaOHCgGjdurDp16ujrr7+2qlYAAAAAANxGngL39OnT1bt3bwUFBWVZFhwcrL59+2ratGlOKw4AAAAAAHeVp8C9e/duPfTQQzkub9OmjXbs2HHbRQEAAAAA4O7yFLiTkpKyvR1YJi8vL50+ffq2iwIAAAAAwN3lKXBXqFBB+/bty3H5nj17FBYWdttFAQAAAADg7vIUuP/85z9r1KhRunz5cpZlv/32m8aMGaOHH37YacUBAAAAAOCu8nRbsJdeekmffPKJ7rzzTg0YMEA1a9aUJP3444+aPXu20tPT9eKLL1pSKAAAAAAA7iRPgTskJESbNm1Sv379NHLkSBljJEk2m02xsbGaPXu2QkJCLCkUAAAAAAB3kqdDyiWpSpUq+uKLL3TmzBlt2bJFmzdv1pkzZ/TFF18oMjIy34VMnjxZNptNQ4YMsbddvnxZ/fv3V5kyZRQQEKDOnTsrKSkp3+8BAAAAAEBBydMe7j8qVaqUGjdu7JQitm3bpjfffFP169d3aB86dKg+//xzLV26VMHBwRowYIA6deqkjRs3OuV9AQAAioOIEZ87dX1HJ7dz6voAoKjK8x5uZ7tw4YK6deum+fPnq1SpUvb2lJQUvfXWW5o2bZpatmyphg0basGCBdq0aZM2b97swooBAAAAALg1lwfu/v37q127doqJiXFo37Fjh65everQHhUVpcqVKyshIaGgywQAAAAAIE/yfUi5MyxZskQ7d+7Utm3bsixLTEyUj4+PSpYs6dAeEhKixMTEHNeZlpamtLQ0+/PU1FSn1QsAAAAAQG65LHAfP35cgwcP1po1a+Tn5+e09U6aNEnjxo1z2vrgXM4+hwxFG+ccAgAAwJ257JDyHTt26NSpU7rnnnvk5eUlLy8vbdiwQTNmzJCXl5dCQkJ05coVJScnO7wuKSlJoaGhOa535MiRSklJsT+OHz9u8ZYAAAAAAJCVy/Zwt2rVSnv37nVo69mzp6KiovTCCy+oUqVK8vb2Vnx8vDp37ixJ2r9/v44dO6bo6Ogc1+vr6ytfX19LawcAAAAA4FZcFrgDAwNVt25dh7YSJUqoTJky9vZevXrpueeeU+nSpRUUFKSBAwcqOjpaf/rTn1xRMgAAAAAAuebSi6bdymuvvSYPDw917txZaWlpio2N1RtvvOHqsgAAAAAAuKVCFbjXr1/v8NzPz0+zZ8/W7NmzXVMQAAAAAAD55PL7cAMAAAAAUBQRuAEAAAAAsACBGwCAYmzy5Mmy2WwaMmSIve3y5cvq37+/ypQpo4CAAHXu3FlJSUmuKxIAADdVqM7hBgAABWfbtm168803Vb9+fYf2oUOH6vPPP9fSpUsVHBysAQMGqFOnTtq4caOLKkVhEzHic1eXAABugT3cAAAUQxcuXFC3bt00f/58lSpVyt6ekpKit956S9OmTVPLli3VsGFDLViwQJs2bdLmzZtdWDEAAO6HwA0AQDHUv39/tWvXTjExMQ7tO3bs0NWrVx3ao6KiVLlyZSUkJBR0
mQAAuDUOKQcAoJhZsmSJdu7cqW3btmVZlpiYKB8fH5UsWdKhPSQkRImJiTmuMy0tTWlpafbnqampTqsXAAB3xR5uAACKkePHj2vw4MFatGiR/Pz8nLbeSZMmKTg42P6oVKmS09YNAIC7InADAFCM7NixQ6dOndI999wjLy8veXl5acOGDZoxY4a8vLwUEhKiK1euKDk52eF1SUlJCg0NzXG9I0eOVEpKiv1x/Phxi7cEAIDCj0PKAQAoRlq1aqW9e/c6tPXs2VNRUVF64YUXVKlSJXl7eys+Pl6dO3eWJO3fv1/Hjh1TdHR0juv19fWVr6+vpbUDAOBuCNwAABQjgYGBqlu3rkNbiRIlVKZMGXt7r1699Nxzz6l06dIKCgrSwIEDFR0drT/96U+uKBkAALdF4AYAAA5ee+01eXh4qHPnzkpLS1NsbKzeeOMNV5cFAIDbIXADAFDMrV+/3uG5n5+fZs+erdmzZ7umIAAAiggumgYAAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiA24IBAAAAsIsY8blT13d0cjunrg9wJ+zhBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACxA4AYAAAAAwAIEbgAAAAAALEDgBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACxA4AYAAAAAwAIEbgAAAAAALEDgBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACxA4AYAAAAAwAIEbgAAAAAALODl6gJQuEWM+NzVJQAAAACAW2IPNwAAAAAAFmAPNwAAAADLOPuIyaOT2zl1fYCV2MMNAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgNuCASg2nHlbEm5JAgAAgFthDzcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAS9XFwAAAICsIkZ87uoSgELJmf83jk5u57R1AdlhDzcAAAAAABZwaeCeNGmSGjdurMDAQJUvX14dO3bU/v37HfpcvnxZ/fv3V5kyZRQQEKDOnTsrKSnJRRUDAAAAAJA7Lg3cGzZsUP/+/bV582atWbNGV69eVZs2bXTx4kV7n6FDh+qzzz7T0qVLtWHDBp04cUKdOnVyYdUAAAAAANyaS8/hXrVqlcPzhQsXqnz58tqxY4ceeOABpaSk6K233tLixYvVsmVLSdKCBQtUq1Ytbd68WX/6059cUTYAAAAAALdUqM7hTklJkSSVLl1akrRjxw5dvXpVMTEx9j5RUVGqXLmyEhISsl1HWlqaUlNTHR4AAAAAABS0QhO4MzIyNGTIEDVr1kx169aVJCUmJsrHx0clS5Z06BsSEqLExMRs1zNp0iQFBwfbH5UqVbK6dAAAAAAAsig0gbt///7at2+flixZclvrGTlypFJSUuyP48ePO6lCAAAAAAByr1AE7gEDBui///2v1q1bp4oVK9rbQ0NDdeXKFSUnJzv0T0pKUmhoaLbr8vX1VVBQkMMDAAD8jruEAABQMFwauI0xGjBggJYtW6avvvpKkZGRDssbNmwob29vxcfH29v279+vY8eOKTo6uqDLBQCgSOAuIQAAFAyXXqW8f//+Wrx4sT799FMFBgbaz8sODg6Wv7+/goOD1atXLz333HMqXbq0goKCNHDgQEVHR3OFcgAA8om7hAAAUDBcuod7zpw5SklJUYsWLRQWFmZ/fPDBB/Y+r732mh5++GF17txZDzzwgEJDQ/XJJ5+4sGoAAIoWZ9wlBAAAZOXSPdzGmFv28fPz0+zZszV79uwCqAgAgOLFWXcJSUtLU1pamv05t+UEAKCQXDQNAAC4hrPuEsJtOQEAyIrADQBAMeXMu4RwW04AALIicAMAUMxYcZcQbssJAEBWLj2HGwAAFDzuEgIAQMEgcAMAUMzMmTNHktSiRQuH9gULFqhHjx6Srt8lxMPDQ507d1ZaWppiY2P1xhtvFHClAAC4NwI3AADFDHcJAQCgYHAONwAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAW8HJ1AXC+iBGfu7oEAAAAACj22MMNAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWICLpgEAgGLJ2RcZPTq5nVPXBwBwf+zhBgAAAADAAgRuAAAAAAAsQOAGAAAAAMACBG4AAAAAACxA4AYAAAAAwAIEbgAAAAAALEDgBgAAAADAAtyHGwAAwAmcfV9vAID7Yw83AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAEumgYAAACgWHL2xQ6PTm7n1PXB/bGHGwAAAAAACxC4AQAAAACwAIEbAAAAAAALcA43AOQD53wBAADgVtjDDQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABQjcAAAAAABYgMANAAAAAIAFCNwAAAAAAFiAwA0AAAAAgAUI3AAAAAAAWIDADQAAAACABbxcXQAAQIoY8blT13d0cjunrg8AAAB5xx5uAAAAAAAsQOAGAAAAAMACBG4AAAAAACxA4AYAAAAAwAIEbgAAAAAALMBVygEAAADACbjrCG7EHm4AAAAAACxA4AYAAAAAwAIcUg4ARZCzD2lzNg6RAwAAxQF7uAEAAAAAsACBGwAAAAAACxC4AQAAAACwAOdwFwKF/VxLAAAAAEDeEbgBAIBb4A/UAHB7uE94weOQcgAAAAAALEDgBgAAAADAAhxSDgAocBzSBgAAigMCNwAAAAAgz/gD+q1xSDkAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAS6aBgAAAACFkLMvSlbYFcWLsLGHGwAAAAAACxC4AQAAAACwgFscUj579mz985//VGJiou666y7NnDlT9957r8vqKW6HdgBAcVMUD2nLj8I2/wIA4G4K/R7uDz74QM8995zGjBmjnTt36q677lJsbKxOnTrl6tIAACiymH8BALh9hT5wT5s2Tb1791bPnj1Vu3ZtzZ07V3fccYfefvttV5cGAECRxfwLAMDtK9SB+8qVK9qxY4diYmLsbR4eHoqJiVFCQoILKwMAoOhi/gUAwDkK9TncZ86cUXp6ukJCQhzaQ0JC9OOPP2b7mrS0NKWlpdmfp6S
kSJJSU1OdVldG2iWnrQsAcPuc+Ttecv7veWfWl7kuY4zT1nkj5l8AQFFQGObfQh2482PSpEkaN25clvZKlSq5oBoAQEEInu7qCm7OivrOnz+v4OBg5684n5h/AQCFTWGYfwt14C5btqw8PT2VlJTk0J6UlKTQ0NBsXzNy5Eg999xz9ucZGRk6e/asypQpI5vNlq86UlNTValSJR0/flxBQUH5WkdRxdhkj3HJGWOTPcYlZ4yNI2OMzp8/r/DwcMveo7DMv7nFZ+Q6xuF3jMXvGIvfMRa/Yyx+l9uxyO/8W6gDt4+Pjxo2bKj4+Hh17NhR0vUJPD4+XgMGDMj2Nb6+vvL19XVoK1mypFPqCQoKKvYfyJwwNtljXHLG2GSPcckZY/M7q/dsF7b5N7f4jFzHOPyOsfgdY/E7xuJ3jMXvcjMW+Zl/C3XglqTnnntOcXFxatSoke69915Nnz5dFy9eVM+ePV1dGgAARRbzLwAAt6/QB+7HH39cp0+f1ujRo5WYmKi7775bq1atynIhFwAA4DzMvwAA3L5CH7glacCAATkewlYQfH19NWbMmCyHyoGxyQnjkjPGJnuMS84YG9dx9fybW3xGrmMcfsdY/I6x+B1j8TvG4ndWj4XNWHlfEQAAAAAAiikPVxcAAAAAAEBRROAGAAAAAMACBG4AAAAAACxA4P7/xo4dK5vN5vCIioqyL798+bL69++vMmXKKCAgQJ07d1ZSUpILK7bO//73P7Vv317h4eGy2Wxavny5w3JjjEaPHq2wsDD5+/srJiZGBw8edOhz9uxZdevWTUFBQSpZsqR69eqlCxcuFOBWON+txqVHjx5ZPkMPPfSQQ5+iOC6SNGnSJDVu3FiBgYEqX768OnbsqP379zv0yc3/oWPHjqldu3a64447VL58eQ0bNkzXrl0ryE1xqtyMS4sWLbJ8bp555hmHPkVtXCRpzpw5ql+/vv2el9HR0Vq5cqV9eXH8vODmnPV7pqiZPHmybDabhgwZYm8rbuPwyy+/6Mknn1SZMmXk7++vevXqafv27fblufne4u7S09M1atQoRUZGyt/fX9WqVdPLL7+sP16qqaiOA99bf3ezsbh69apeeOEF1atXTyVKlFB4eLj+9re/6cSJEw7rKA5jcaNnnnlGNptN06dPd2h31lgQuP+gTp06OnnypP3xzTff2JcNHTpUn332mZYuXaoNGzboxIkT6tSpkwurtc7Fixd11113afbs2dkunzp1qmbMmKG5c+dqy5YtKlGihGJjY3X58mV7n27duum7777TmjVr9N///lf/+9//1KdPn4LaBEvcalwk6aGHHnL4DL3//vsOy4viuEjShg0b1L9/f23evFlr1qzR1atX1aZNG128eNHe51b/h9LT09WuXTtduXJFmzZt0jvvvKOFCxdq9OjRrtgkp8jNuEhS7969HT43U6dOtS8riuMiSRUrVtTkyZO1Y8cObd++XS1btlSHDh303XffSSqenxfcnDN+zxQ127Zt05tvvqn69es7tBencTh37pyaNWsmb29vrVy5Ut9//71effVVlSpVyt4nN99b3N2UKVM0Z84czZo1Sz/88IOmTJmiqVOnaubMmfY+RXUc+N76u5uNxaVLl7Rz506NGjVKO3fu1CeffKL9+/frkUcecehXHMbij5YtW6bNmzcrPDw8yzKnjYWBMcaYMWPGmLvuuivbZcnJycbb29ssXbrU3vbDDz8YSSYhIaGAKnQNSWbZsmX25xkZGSY0NNT885//tLclJycbX19f8/777xtjjPn++++NJLNt2zZ7n5UrVxqbzWZ++eWXAqvdSjeOizHGxMXFmQ4dOuT4muIwLplOnTplJJkNGzYYY3L3f+iLL74wHh4eJjEx0d5nzpw5JigoyKSlpRXsBljkxnExxpjmzZubwYMH5/ia4jAumUqVKmX+/e9/83lBruTn90xRcv78eVOjRg2zZs0ah98jxW0cXnjhBXPffffluDw331uKgnbt2pmnnnrKoa1Tp06mW7duxpjiMw58b/1ddt9Vb7R161Yjyfz888/GmOI3Fv/3f/9nKlSoYPbt22eqVKliXnvtNfsyZ44Fe7j/4ODBgwoPD1fVqlXVrVs3HTt2TJK0Y8cOXb16VTExMfa+UVFRqly5shISElxVrkscOXJEiYmJDmMRHBysJk2a2MciISFBJUuWVKNGjex9YmJi5OHhoS1bthR4zQVp/fr1Kl++vGrWrKl+/frp119/tS8rTuOSkpIiSSpdurSk3P0fSkhIUL169RQSEmLvExsbq9TUVPteT3d347hkWrRokcqWLau6detq5MiRunTpkn1ZcRiX9PR0LVmyRBcvXlR0dDSfF+RKfn7PFCX9+/dXu3btHLZXKn7jsGLFCjVq1EiPPfaYypcvrwYNGmj+/Pn25bn53lIUNG3aVPHx8Tpw4IAkaffu3frmm2/Utm1bScVnHG7E99abS0lJkc1mU8mSJSUVr7HIyMhQ9+7dNWzYMNWpUyfLcmeOhddtV1tENGnSRAsXLlTNmjV18uRJjRs3Tvfff7/27dunxMRE+fj42D+MmUJCQpSYmOiagl0kc3v/+CU383nmssTERJUvX95huZeXl0qXLl2kx+uhhx5Sp06dFBkZqcOHD+sf//iH2rZtq4SEBHl6ehabccnIyNCQIUPUrFkz1a1bV5Jy9X8oMTEx289V5jJ3l924SNJf//pXValSReHh4dqzZ49eeOEF7d+/X5988omkoj0ue/fuVXR0tC5fvqyAgAAtW7ZMtWvX1q5du4r95wU3l9/fM0XFkiVLtHPnTm3bti3LsuI0DpL0008/ac6cOXruuef0j3/8Q9u2bdOgQYPk4+OjuLi4XH1vKQpGjBih1NRURUVFydPTU+np6ZowYYK6desmKXff34oivrfm7PLly3rhhRf0xBNPKCgoSFLxGospU6bIy8tLgwYNyna5M8eCwP3/Zf4FUJLq16+vJk2aqEqVKvrwww/l7+/vwsrgLrp27Wr/d7169VS/fn1Vq1ZN69evV6tWrVxYWcHq37+/9u3b53ANBOQ8Ln88F6hevXoKCwtTq1atdPjwYVWrVq2gyyxQNWvW1K5du5SSkqKPPvpIcXFx2rBhg6vLghsozr9njh8/rsGDB2vNmjXy8/NzdTkul5GRoUaNGmnixImSpAYNGmjfvn2aO3eu4uLiXFxdwfnwww+1aNEiLV68WHXq1NGuXbs0ZMgQhYeHF6txQO5cvXpVXbp0kTFGc+bMcXU5BW7Hjh16/fXXtXPnTtlsNsvfj0PKc1CyZEndeeedOnTokEJDQ3XlyhUlJyc79ElKSlJoaKhrCnSRzO298WqnfxyL0NBQnTp1ymH5tWvXdPbs2WI1XlWrVlXZsmV16NAhScVjXAYMGKD//ve/WrdunSpWrGhvz83/odDQ0Gw/V5nL3FlO45KdJk2aSJLD56aojouPj4+qV6+uhg0batKkSbrrrrv0+uuvF/vPC27udn7PFAU7duzQqVOndM8998jLy0teXl7asGGDZsyYIS8vL4WEhBSLcc
gUFham2rVrO7TVqlXLflpgbr63FAXDhg3TiBEj1LVrV9WrV0/du3fX0KFDNWnSJEnFZxxuxPfWrDLD9s8//6w1a9bY925LxWcsvv76a506dUqVK1e2/x79+eef9fe//10RERGSnDsWBO4cXLhwQYcPH1ZYWJgaNmwob29vxcfH25fv379fx44dU3R0tAurLHiRkZEKDQ11GIvU1FRt2bLFPhbR0dFKTk7Wjh077H2++uorZWRk2MNEcfB///d/+vXXXxUWFiapaI+LMUYDBgzQsmXL9NVXXykyMtJheW7+D0VHR2vv3r0Ov9wyJ4Ibv0y5i1uNS3Z27dolSQ6fm6I2LjnJyMhQWlpasf284Oac8XumKGjVqpX27t2rXbt22R+NGjVSt27d7P8uDuOQqVmzZlluD3fgwAFVqVJFUu6+txQFly5dkoeH49d6T09PZWRkSCo+43Ajvrc6ygzbBw8e1Nq1a1WmTBmH5cVlLLp37649e/Y4/B4NDw/XsGHDtHr1aklOHov8XOmtKPr73/9u1q9fb44cOWI2btxoYmJiTNmyZc2pU6eMMcY888wzpnLlyuarr74y27dvN9HR0SY6OtrFVVvj/Pnz5ttvvzXffvutkWSmTZtmvv32W/sVDCdPnmxKlixpPv30U7Nnzx7ToUMHExkZaX777Tf7Oh566CHToEEDs2XLFvPNN9+YGjVqmCeeeMJVm+QUNxuX8+fPm+eff94kJCSYI0eOmLVr15p77rnH1KhRw1y+fNm+jqI4LsYY069fPxMcHGzWr19vTp48aX9cunTJ3udW/4euXbtm6tata9q0aWN27dplVq1aZcqVK2dGjhzpik1yiluNy6FDh8z48ePN9u3bzZEjR8ynn35qqlatah544AH7OoriuBhjzIgRI8yGDRvMkSNHzJ49e8yIESOMzWYzX375pTGmeH5ecHPO+D1TVN14t4PiNA5bt241Xl5eZsKECebgwYNm0aJF5o477jDvvfeevU9uvre4u7i4OFOhQgXz3//+1xw5csR88sknpmzZsmb48OH2PkV1HPje+rubjcWVK1fMI488YipWrGh27drl8Hv0j3f3KA5jkZ0br1JujPPGgsD9/z3++OMmLCzM+Pj4mAoVKpjHH3/cHDp0yL78t99+M88++6wpVaqUueOOO8xf/vIXc/LkSRdWbJ1169YZSVkecXFxxpjrt1gYNWqUCQkJMb6+vqZVq1Zm//79Duv49ddfzRNPPGECAgJMUFCQ6dmzpzl//rwLtsZ5bjYuly5dMm3atDHlypUz3t7epkqVKqZ3794OtywypmiOizEm23GRZBYsWGDvk5v/Q0ePHjVt27Y1/v7+pmzZsubvf/+7uXr1agFvjfPcalyOHTtmHnjgAVO6dGnj6+trqlevboYNG2ZSUlIc1lPUxsUYY5566ilTpUoV4+PjY8qVK2datWplD9vGFM/PC27OWb9niqIbA3dxG4fPPvvM1K1b1/j6+pqoqCgzb948h+W5+d7i7lJTU83gwYNN5cqVjZ+fn6latap58cUXHYJUUR0Hvrf+7mZjceTIkRx/j65bt86+juIwFtnJLnA7ayxsxhiTt33iAAAAAADgVjiHGwAAAAAACxC4AQAAAACwAIEbAAAAAAALELgBAAAAALAAgRsAAAAAAAsQuAEAAAAAsACBGwAAAAAACxC4AQAAAACwAIEbQKHQo0cPdezY0dVlAABQrDD/AtYicAPFjKsn1qNHj8pms2nXrl0uqwEAgILG/AsUTwRuAAAAAAAsQOAGYLdv3z61bdtWAQEBCgkJUffu3XXmzBn78hYtWmjQoEEaPny4SpcurdDQUI0dO9ZhHT/++KPuu+8++fn5qXbt2lq7dq1sNpuWL18uSYqMjJQkNWjQQDabTS1atHB4/b/+9S+FhYWpTJky6t+/v65evWrlJgMA4HLMv0DRReAGIElKTk5Wy5Yt1aBBA23fvl2rVq1SUlKSunTp4tDvnXfeUYkSJbRlyxZNnTpV48eP15o1ayRJ6enp6tixo+644w5t2bJF8+bN04svvujw+q1bt0qS1q5dq5MnT+qTTz6xL1u3bp0OHz6sdevW6Z133tHChQu1cOFCazccAAAXYv4FijYvVxcAoHCYNWuWGjRooIkTJ9rb3n77bVWqVEkHDhzQnXfeKUmqX7++xowZI0mqUaOGZs2apfj4eLVu3Vpr1qzR4cOHtX79eoWGhkqSJkyYoNatW9vXWa5cOUlSmTJl7H0ylSpVSrNmzZKnp6eioqLUrl07xcfHq3fv3pZuOwAArsL8CxRtBG4AkqTdu3dr3bp1CggIyLLs8OHDDhP+H4WFhenUqVOSpP3796tSpUoOE/m9996b6xrq1KkjT09Ph3Xv3bs3T9sBAIA7Yf4FijYCNwBJ0oULF9S+fXtNmTIly7KwsDD7v729vR2W2Ww2ZWRkOKUGK9cNAEBhxPwLFG0EbgCSpHvuuUcff/yxIiIi5OWVv18NNWvW1PHjx5WUlKSQkBBJ0rZt2xz6+Pj4SLp+vhkAAMUd8y9QtHHRNKAYSklJ0a5duxweffr00dmzZ/XEE09o27ZtOnz4sFavXq2ePXvmenJu3bq1qlWrpri4OO3Zs0cbN27USy+9JOn6X8slqXz58vL397dfFCYlJcWy7QQAoDBh/gWKHwI3UAytX79eDRo0cHi8/PLL2rhxo9LT09WmTRvVq1dPQ4YMUcmSJeXhkbtfFZ6enlq+fLkuXLigxo0b6+mnn7ZfJdXPz0+S5OXlpRkzZujNN99UeHi4OnToYNl2AgBQmDD/AsWPzRhjXF0EgKJr48aNuu+++3To0CFVq1bN1eUAAFAsMP8ChQOBG4BTLVu2TAEBAapRo4YOHTqkwYMHq1SpUvrmm29cXRoAAEUW8y9QOHHRNABOdf78eb3wwgs6duyYypYtq5iYGL366quuLgsAgCKN+RconNjDDQAAAACABbhoGgAAAAAAFiBwAwAAAABgAQI3AAAAAAAWIHADAAAAAGABAjcAAAAAABYgcAMAAAAAYAECNwAAAAAAFiBwAwAAAABgAQI3AAAAAAAW+H9oKjHkh8zYigAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, axes = plt.subplots(1, 2, figsize=(10, 5) )\n", + "axes[0].hist(Len_SciTLDR_test_Textt, bins = 20 )\n", + "axes[0].set_title(\"SciTLDR_test_Text Length\")\n", + "axes[0].set_xlabel(\"Length\")\n", + "axes[0].set_ylabel(\"Count\")\n", + "\n", + "axes[1].hist(Len_SciTLDR_test_Summaryy, bins = 20 )\n", + "axes[1].set_title(\"SciTLDR_test_Summary Length\")\n", + "axes[1].set_xlabel(\"Length\")\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "591bc3b3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 566, + "id": "ab4ae22b", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_test.drop(NN_SciTLDR_test[NN_SciTLDR_test.Len_SciTLDR_test_Text < 50].index, inplace=True)\n", + "NN_SciTLDR_test.drop(NN_SciTLDR_test[NN_SciTLDR_test.Len_SciTLDR_test_Text > 350].index, inplace=True)\n", + "\n", + "NN_SciTLDR_test.drop(NN_SciTLDR_test[NN_SciTLDR_test.Len_SciTLDR_test_Summary < 30].index, inplace=True)\n", + "NN_SciTLDR_test.drop(NN_SciTLDR_test[NN_SciTLDR_test.Len_SciTLDR_test_Summary > 140].index, inplace=True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 567, + "id": "230ea821", + "metadata": {}, + "outputs": [], + "source": [ + "NeW_Len_SciTLDR_test_Text = []\n", + "NeW_Len_SciTLDR_test_Text = NN_SciTLDR_test['Len_SciTLDR_test_Text'].values.tolist()\n", + "\n", + "NeW_Len_SciTLDR_test_Summary = []\n", + "NeW_Len_SciTLDR_test_Summary = NN_SciTLDR_test['Len_SciTLDR_test_Summary'].values.tolist()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 569, + "id": "c1c33ee7", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA9wAAAHqCAYAAAD27EaEAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABbm0lEQVR4nO3deVzVVf7H8fdlJxFQVJYEwSVxLVMz0tIQJcdMR1v0Z4ZLauWS2iZTrqmoU2kaYjqGNuVYNmk6pY6S2mRoilna4pYLk4KWAS5xNfj+/vDBna6AAvLlXvD1fDy+j4f3nHPP/XwPF8/3w/kuFsMwDAEAAAAAgHLl4ugAAAAAAACoiki4AQAAAAAwAQk3AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAAAAABOQcAMAAAAAYAISbgAAAAAATEDCDQAAAACACUi4ccPo1KmTOnXq5OgwgHLXqVMnNW/e3NFhAKiCmDsB53L06FFZLBa98sorjg4FJUTCDae2d+9ePfjgg6pXr568vLx08803q0uXLpo/f/519dupUydZLJZrbpMnT5YkhYeH6/77779qnwMHDrR7r4+Pj+rXr68HH3xQ//znP5Wfn3/NOLy9vdWyZUvNnTu3yPbXsmDBAi1durTU7yuN7777TpMnT9bRo0dL9b6SjLfFYtGWLVvKJc4TJ05o8uTJ2rNnT4naL126VBaLRbt27SqXzy9vpd0fADcu5s7Scea5s4BZP9MbmbP/sfqTTz6x/S6hcnNzdABAcb744gvde++9CgsL09ChQxUUFKT09HRt375dr7/+ukaNGlWq/v7973/b/v3iiy/q8ccft73euXOn5s2bp7/85S9q0qSJrbxly5al+gxPT0/97W9/kyT99ttvOnbsmNauXasHH3xQnTp10kcffSRfX1+799StW1cJCQmSpJ9//lnLly/X2LFjdfr0aU2fPr1Un79gwQLVqlVLAwcOLNX7SuO7777TlClT1KlTJ4WHh5f4fX//+9/tXr/99tvauHFjofI/jv/1OHHihKZMmaLw8HDddttt5dKnI1W1/QFgDubOqjV3SuX/M0Xl8MknnygxMZGkuwog4YbTmj59uvz8/LRz5075+/vb1Z06darU/Xl4eNj+3aVLF7s6Ly8vzZs3T126dLmuU+fc3Nz06KOP2pVNmzZNM2fOVHx8vIYOHar33nvPrt7Pz8/uPU888YQiIyM1f/58TZ06Va6urmWOx5lcOS7bt2/Xxo0bC5UDAMqOubNqzZ1S+f9MKzvDMJSbmytvb29HhwKUCKeUw2kdPnxYzZo1KzS5SFKdOnUKlb3zzju64447dNNNN6lGjRq655577P4y78jr0MaPH6+uXbtq5cqVOnDgwFXbenl5qW3btjp79mypJtLw8HB9++232rp1q+00uz/ub1ZWlsaMGaPQ0FB5enqqYcOGmjVrVqHT71asWKHWrVurevXq8vX1VYsWLfT6669Lunza9UMPPSRJuvfee8v9NPD8/HzNnTtXzZo1k5eXlwIDAzV8+HD9+uuvtjaTJk2Si4uLUlJS7N47bNgweXh46Ouvv9aWLVvUtm1bSdKgQYNscZbHKYM//fSTBg8erMDAQHl6eqpZs2Z666237Nps2bJFFotF77//vqZPn666devKy8tLnTt31qFDhwr1mZiYqPr168vb21t33HGH/vOf/9h9X0u6P999953uvfde3XTTTbr55ps1e/bs695fAJULc2fVmztL+jMtuLa3qLnuj6f6S9LkyZNlsVh04MABPfroo/Lz81Pt2rU1YcIEGYah9PR09ezZU76+vgoKCtKrr75q198f57kpU6bo5ptvVvXq1fXggw8qOztbVqtVY8aMUZ06deTj46NBgwbJarXa9ZGcnKzo6GjVqVNHnp6eatq0qZKSkgrFXnBpwoYNG9SmTRt5e3vrzTffVMeOHXXrrbcWOWaNGzdWbGzsVUa15NatW6e7775b1apVU/Xq1dW9e3d9++23dm0GDhwoHx8f/fTTT+rVq5d8fHxUu3ZtPfvss8rLy7Nr+8svv2jAgAHy9fWVv7+/4uLi9PXXX9v97AYOHKjExERJ9pfkXWnRokVq0KCBPD091bZtW+3cubNc9hnlixVuOK169eopNTVV+/btu+Y1NlOmTNHkyZN11113aerUqfLw8NCOHTv06aefqmvXrhUU8dUNGDBA//73v7Vx40bdcsstV21bMGkWNbkWZ+7cuRo1apR8fHz04osvSpICAwMlSRcuXFDHjh31008/afjw4QoLC9MXX3yh+Ph4nTx5UnPnzpUkbdy4Uf369VPnzp01a9YsSdL333+vbdu26emnn9Y999yj0aNHFzqFsLxOAx8+fLiWLl2qQYMGafTo0Tpy5IjeeOMNffXVV9q2bZvc3d310ksvae3atRoyZIj27t2r6tWra8OGDVq8eLFefvll3XrrrcrMzNTUqVM1ceJEDRs2THfffbck6a677rqu+DIzM3XnnXfKYrFo5MiRql27ttatW6chQ4YoJydHY8aMsWs/c+ZMubi46Nlnn1V2drZmz56t/v37a8eOHbY2SUlJGjlypO6++26NHTtWR48eVa9evVSjRg3VrVtX0uXxvdb+/Prrr7rvvvvUu3dvPfzww/rggw/0wgsvqEWLFurWrdt17TeAyoO5s+rNnaX5mZbWI488oiZNmmjmzJn6+OOPNW3aNNWsWVNvvvmmoqOjNWvWLL377rt69tln1bZtW91zzz12709ISJC3t7fGjx+vQ4cOaf78+XJ3d5eLi4t+/fVXTZ48Wdu3b9fSpUsVERGhiRMn2t6blJSkZs2a6YEHHpCbm5vWrl2rp556Svn5+RoxYoTd5+zfv1/9+vXT8OHDNXToUDVu3Fg+Pj4aOnRooXHZuXOnDhw4oJdeeum6x+fvf/+74uLiFBsbq1mzZunChQtKSkpShw4d9NVXX9ldHpCXl6fY2Fi1a9dOr7zyijZt2qRXX31VDRo00JNPPinp8sJCjx499OWXX+rJJ59UZGSkPvroI8XFxdl97vDhw3XixIkiL70rsHz5cp09e1bDhw+XxWLR7Nmz1bt3b/34449yd3e/7n1HOTIAJ/Xvf//bcHV1NVxdXY2oqCjj+eefNzZs2GBcvHjRrt3BgwcNFxcX489//rORl5dnV5efn2/7d8eOHY2OHTsW+VkrV640JBmbN28usr5evXpG9+7drxpvXFycUa1atWLrv/rqK0OSMXbsWLuYIiMjjdOnTxunT582fvjhB+O5554zJF3z84rSrFmzIvfx5ZdfNqpVq2YcOHDArnz8+PGGq6urcfz4ccMwDOPpp582fH19jd9//73Yz7jWWJXUiBEjjD/+F/Sf//zHkGS8++67du3Wr19fqHzv3r2Gh4eH8fjjjxu//vqrcfPNNxtt2rQxLl26ZGuzc+dOQ5KRnJxconiSk5MNScbOnTuLbTNkyBAjODjY+Pnnn+3K+/bta/j5+RkXLlwwDMMwNm/ebEgymjRpYlitVlu7119/3ZBk7N271zAMw7BarUZAQI
DRtm1bu9iXLl1qSLL7WV5tfzp27GhIMt5++21bmdVqNYKCgow+ffqUaP8BVA3MnVVv7izpz/TIkSPFzhOSjEmTJtleT5o0yZBkDBs2zFb2+++/G3Xr1jUsFosxc+ZMW/mvv/5qeHt7G3FxcbaygnmuefPmdnH069fPsFgsRrdu3ew+PyoqyqhXr55dWcGc+UexsbFG/fr17crq1atnSDLWr19vV56VlWV4eXkZL7zwgl356NGjjWrVqhnnzp0r1P8fdezY0WjWrFmx9WfPnjX8/f2NoUOH2pVnZGQYfn5+duVxcXGGJGPq1Kl2bVu1amW0bt3a9vqf//ynIcmYO3eurSwvL8+Ijo4u9LO78jipQMHPOSAgwDhz5oyt/KOPPjIkGWvXrr3qfqPicUo5nFaXLl2UmpqqBx54QF9//bVmz56t2NhY3XzzzVqzZo2t3erVq5Wfn6+JEyfKxcX+K13U6TeO4uPjI0k6e/asXfkPP/yg2rVrq3bt2oqMjNRf//pXPfDAA+V6x9SVK1fq7rvvVo0aNfTzzz/btpiYGOXl5emzzz6TJPn7++v8+fPauHFjuX12aWL08/NTly5d7GJs3bq1fHx8tHnzZlvb5s2ba8qUKfrb3/6m2NhY/fzzz1q2bJnc3Mw7accwDP3zn/9Ujx49ZBiGXYyxsbHKzs7W7t277d4zaNAgu+sfC1amf/zxR0nSrl279Msvv2jo0KF2sffv3181atQoVXw+Pj521zN6eHjojjvusH0WgBsDc+fScvtsZ5k7S/ozLYs/3gTP1dVVbdq0kWEYGjJkiK3c399fjRs3LnI+eeyxx+xWU9u1ayfDMDR48GC7du3atVN6erp+//13W9kfr8HOzs7Wzz//rI4dO+rHH39Udna23fsjIiIKnSLu5+ennj176h//+IcMw5B0eZX5vffeU69evVStWrXSDEUhGzduVFZWlvr162f383d1dVW7du3sjksKPPHEE3av7777brtxW79+vdzd3TV06FBbmYuLS6EV/ZJ45JFH7I4VrjzGgPPglHI4tbZt2+rDDz/UxYsX9fXXX2vVqlWaM2eOHnzwQe3Zs0dNmzbV4cOH5eLioqZNmzo63Ks6d+6cJKl69ep25eHh4Vq8eLHy8/N1+PBhTZ8+XadPn5aXl1e5ffbBgwf1zTffqHbt2kXWF1zv9tRTT+n9999Xt27ddPPNN6tr1656+OGHdd9995VbLFeLMTs7u8hrDP8YY4HnnntOK1as0JdffqkZM2aY/vM/ffq0srKytGjRIi1atKhEMYaFhdm9LpgYC65JP3bsmCSpYcOGdu3c3NxKfRfbunXrFjpIrlGjhr755ptS9QOg8mPuLB/ONHeW5GdaFlfOU35+fvLy8lKtWrUKlf/yyy8ler8khYaGFirPz89Xdna2AgICJEnbtm3TpEmTlJqaqgsXLti1z87OtvUlXU64i/LYY4/pvffe03/+8x/dc8892rRpkzIzMzVgwICr7XaJHDx4UJIUHR1dZP2Vd8738vIq9F2pUaOG3X1ojh07puDgYN1000127a48DiiJax1jwHmQcKNS8PDwUNu2bdW2bVvdcsstGjRokFauXKlJkyY5OrQS27dvn6TC/6lWq1ZNMTExttft27fX7bffrr/85S+aN29euXx2fn6+unTpoueff77I+oLr4urUqaM9e/Zow4YNWrdundatW6fk5GQ99thjWrZsWbnEcrUY69Spo3fffbfI+isnsR9//NE2Ge7du9fU2Ariky7fbf3Ka60KXPkonOLuklvwl/jyVJGfBaByYO68Ps44d17tZ1rcmQlX3rTrj4qaO0oznxTX9lp9HD58WJ07d1ZkZKRee+01hYaGysPDQ5988onmzJlT6KZ0xd2RPDY2VoGBgXrnnXd0zz336J133lFQUJDdd6OsCmL4+9//rqCgoEL1V55VV9F3xmferzxIuFHptGnTRpJ08uRJSVKDBg2Un5+v7777zqmfT/z3v/9dFoul0GNVrtSyZUs9+uijevPNN/Xss88W+gvm1RQ32TZo0EDnzp0r0QTk4eGhHj16qEePHsrPz9dTTz2lN998UxMmTFDDhg1NO9WwQYMG2rRpk9q3b3/NR33k5+dr4MCB8vX11ZgxYzRjxgw9+OCD6t27t61NecdZu3ZtVa9eXXl5eeUykUuXb4QjSYcOHdK9995rK//999919OhRuwTemU7xBFD5MHcWr7LOnVf+TAtWOLOysuzaFZxN5UzWrl0rq9WqNWvW2P2sijpN+2pcXV31f//3f1q6dKlmzZql1atXa+jQoeWS/DZo0EDS5T+olOe8v3nzZl24cMFulbuoJ5gw71cdXMMNp7V58+Yi/0r3ySefSLr8yAdJ6tWrl1xcXDR16tRCfxF1lr/yzZw5U//+97/1yCOPqFGjRtds//zzz+vSpUt67bXXSvU51apVKzTRStLDDz+s1NRUbdiwoVBdVlaW7ZqqK08Xc3FxsSV9BY/zKLgmqqjPuR4PP/yw8vLy9PLLLxeq+/333+0+77XXXtMXX3yhRYsW6eWXX9Zdd92lJ598Uj///LOtTXnH6erqqj59+uif//ynbcXlj06fPl3qPtu0aaOAgAAtXrzY7rq2d999t9ApYWaNO4Cqhbmz6s2dJf2Z+vr6qlatWrZrywssWLCg1J9ptoKE+I/7lZ2dreTk5FL3NWDAAP36668aPny4zp07V+iZ7mUVGxsrX19fzZgxQ5cuXSpUX5Z5PzY2VpcuXdLixYttZfn5+bZHgP0R837VwQo3nNaoUaN04cIF/fnPf1ZkZKQuXryoL774Qu+9957Cw8M1aNAgSZdPM3vxxRf18ssv6+6771bv3r3l6empnTt3KiQkRAkJCeUSz6FDhzRt2rRC5a1atVL37t0lXU4M33nnHUlSbm6ujh07pjVr1uibb77RvffeW+y1v1dq2rSp/vSnP+lvf/ubJkyYYLve6Vpat26tpKQkTZs2TQ0bNlSdOnUUHR2t5557TmvWrNH999+vgQMHqnXr1jp//rz27t2rDz74QEePHlWtWrX0+OOP68yZM4qOjlbdunV17NgxzZ8/X7fddpvt8SW33XabXF1dNWvWLGVnZ8vT09P2HM3r0bFjRw0fPlwJCQnas2ePunbtKnd3dx08eFArV67U66+/rgcffFDff/+9JkyYoIEDB6pHjx6SLj/j9LbbbrNdRydd/su0v7+/Fi5cqOrVq6tatWpq165dsdeBFXjrrbe0fv36QuVPP/20Zs6cqc2bN6tdu3YaOnSomjZtqjNnzmj37t3atGmTzpw5U6p99vDw0OTJkzVq1ChFR0fr4Ycf1tGjR7V06VI1aNDA7q/bZd0fADcW5s6qN3eW9GcqXb4J2syZM/X444+rTZs2+uyzz675DHNH6Nq1q+2sgIJEefHixapTp45txb6kWrVqpebNm2vlypVq0qSJbr/99hK/9/Tp00V+PyMiItS/f38lJSVpwIABuv3229W3b1/Vrl1bx48f18cff6z27dvrjTfeKFWsvXr10h133KFnnnlGhw4dU
mRkpNasWWM7fvjjvN+6dWtJ0ujRoxUbGytXV1f17du3VJ8HJ1HxN0YHSmbdunXG4MGDjcjISMPHx8fw8PAwGjZsaIwaNcrIzMws1P6tt94yWrVqZXh6eho1atQwOnbsaGzcuNFWf72PNpFU5DZkyBDDMP73SIiC7aabbjLCw8ONPn36GB988EGhx64UxFTcIym2bNlS6DEe15KRkWF0797dqF69eqHHSp09e9aIj483GjZsaHh4eBi1atUy7rrrLuOVV16xPdLjgw8+MLp27WrUqVPH8PDwMMLCwozhw4cbJ0+etPucxYsXG/Xr1zdcXV3L/JiT4h53sWjRIqN169aGt7e3Ub16daNFixbG888/b5w4ccL4/fffjbZt2xp169Y1srKy7N5X8Mit9957z1b20UcfGU2bNjXc3Nyu+YiwgseCFbelp6cbhmEYmZmZxogRI4zQ0FDD3d3dCAoKMjp37mwsWrTI1lfB41JWrlxp9xnFPbJl3rx5Rr169QxPT0/jjjvuMLZt22a0bt3auO++++zaFbc/xX2P4uLiCj2GBUDVxtxZ9ebO0vxML1y4YAwZMsTw8/Mzqlevbjz88MPGqVOnin0s2OnTp+3eX9xj2q4c8+LmueIesVnU561Zs8Zo2bKl4eXlZYSHhxuzZs0y3nrrLUOSceTIEVu7kjxebvbs2YYkY8aMGVdtd+U+Fff97Ny5s92+xsbGGn5+foaXl5fRoEEDY+DAgcauXbtsbYobt4L9/qPTp08b//d//2dUr17d8PPzMwYOHGhs27bNkGSsWLHC1u733383Ro0aZdSuXduwWCy2fgqOJf76178W+rzSfvdRMSyG4STnDQEAJF0+vax27drq3bu33WlnAACgsNdff11jx47V0aNHS3X9vrNYvXq1/vznP+vzzz9X+/btHR0OyhnXcAOAA+Xm5ha6Nu/tt9/WmTNn1KlTJ8cEBQBAJWEYhpYsWaKOHTtWimT7t99+s3udl5en+fPny9fXt1Snw6Py4BpuoBI4ffr0VR/r4eHhoZo1a1ZgRPbOnTtne1ZqcWrXrl3hj8yoDLZv366xY8fqoYceUkBAgHbv3q0lS5aoefPmeuihhxwdHgBUWsydVdv58+e1Zs0abd68WXv37tVHH33k6JBKZNSoUfrtt98UFRUlq9WqDz/8UF988YVmzJhxzae0oHLilHKgEggPD7/qYz06duyoLVu2VFxAV5g8ebKmTJly1TZHjhxReHh4xQRUiRw9elSjR4/Wl19+qTNnzqhmzZr605/+pJkzZ173jegA4EbG3Fm1HT16VBEREfL399dTTz2l6dOnOzqkElm+fLleffVVHTp0SLm5uWrYsKGefPJJjRw50tGhwSQk3EAlsG3btkKnIP1RjRo1bHezdIQff/xRP/7441XbdOjQQV5eXhUUEQDgRsfcCcAZkHADAAAAAGACbpoGAAAAAIAJqvxN0/Lz83XixAlVr17d7mHyAAA4K8MwdPbsWYWEhMjFpWr9bZx5GQBQ2VzPvFzlE+4TJ04oNDTU0WEAAFBq6enpqlu3rqPDKFfMywCAyqos83KVT7irV68u6fLg+Pr6OjgaAACuLScnR6GhobY5rCphXgYAVDbXMy9X+YS74HQ1X19fJnYAQKVSFU+5Zl4GAFRWZZmXq9aFYQAAAAAAOAkSbgAAAAAATEDCDQAAAACACUi4AQAAAAAwAQk3AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAEiS8vLyNGHCBEVERMjb21sNGjTQyy+/LMMwbG0Mw9DEiRMVHBwsb29vxcTE6ODBgw6MGgAA50XCDQAAJEmzZs1SUlKS3njjDX3//feaNWuWZs+erfnz59vazJ49W/PmzdPChQu1Y8cOVatWTbGxscrNzXVg5AAAOCc3RwcAAACcwxdffKGePXuqe/fukqTw8HD94x//0Jdffinp8ur23Llz9dJLL6lnz56SpLfffluBgYFavXq1+vbt67DYAQBwRqxwAwAASdJdd92llJQUHThwQJL09ddf6/PPP1e3bt0kSUeOHFFGRoZiYmJs7/Hz81O7du2UmprqkJgBAHBmrHADAABJ0vjx45WTk6PIyEi5uroqLy9P06dPV//+/SVJGRkZkqTAwEC79wUGBtrqrmS1WmW1Wm2vc3JyTIoeAADnwwo3AACQJL3//vt69913tXz5cu3evVvLli3TK6+8omXLlpW5z4SEBPn5+dm20NDQcowYAADnRsINAAAkSc8995zGjx+vvn37qkWLFhowYIDGjh2rhIQESVJQUJAkKTMz0+59mZmZtrorxcfHKzs727alp6ebuxMAADgREm4AACBJunDhglxc7A8NXF1dlZ+fL0mKiIhQUFCQUlJSbPU5OTnasWOHoqKiiuzT09NTvr6+dhsAADcKruEGAACSpB49emj69OkKCwtTs2bN9NVXX+m1117T4MGDJUkWi0VjxozRtGnT1KhRI0VERGjChAkKCQlRr169HBs8AABOiIQbAABIkubPn68JEyboqaee0qlTpxQSEqLhw4dr4sSJtjbPP/+8zp8/r2HDhikrK0sdOnTQ+vXr5eXl5cDIAQBwThbDMAxHB2GmnJwc+fn5KTs7m9PYUKmFj//Y1P6Pzuxuav8ASq4qz11Ved9udOU5TzEnAXAm1zN3cQ03AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAAAAABOQcAMAAAAAYAISbgAAAAAATEDCDQAAAACACUi4AQAAAAAwAQk3AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAAAAABOQcAMAAAAAYAISbgAAAAAATEDCDQAAAACACUi4AQAAAAAwAQk3AAAAAAAmcGjCnZeXpwkTJigiIkLe3t5q0KCBXn75ZRmGYWtjGIYmTpyo4OBgeXt7KyYmRgcPHnRg1AAAAAAAXJtDE+5Zs2YpKSlJb7zxhr7//nvNmjVLs2fP1vz5821tZs+erXnz5mnhwoXasWOHqlWrptjYWOXm5jowcgAAAAAArs7NkR/+xRdfqGfPnurevbskKTw8XP/4xz/05ZdfSrq8uj137ly99NJL6tmzpyTp7bffVmBgoFavXq2+ffs6LHYAAAAAAK7GoSvcd911l1JSUnTgwAFJ0tdff63PP/9c3bp1kyQdOXJEGRkZiomJsb3Hz89P7dq1U2pqapF9Wq1W5eTk2G0AAAAAAFQ0h65wjx8/Xjk5OYqMjJSrq6vy8vI0ffp09e/fX5KUkZEhSQoMDLR7X2BgoK3uSgkJCZoyZYq5gQMAAAAAcA0OXeF+//339e6772r58uXavXu3li1bpldeeUXLli0rc5/x8fHKzs62benp6eUYMQAAAAAAJePQFe7nnntO48ePt12L3aJFCx07dkwJCQmKi4tTUFCQJCkzM1PBwcG292VmZuq2224rsk9PT095enqaHjsAAAAAAFfj0BXuCxcuyMXFPgRXV1fl5+dLkiIiIhQUFKSUlBRbfU5Ojnbs2KGoqKgKjRUAAAAAgNJw6Ap3jx49NH36dIWF
halZs2b66quv9Nprr2nw4MGSJIvFojFjxmjatGlq1KiRIiIiNGHCBIWEhKhXr16ODB0AAAAAgKtyaMI9f/58TZgwQU899ZROnTqlkJAQDR8+XBMnTrS1ef7553X+/HkNGzZMWVlZ6tChg9avXy8vLy8HRg4ULXz8x44OAQAAAICTcGjCXb16dc2dO1dz584tto3FYtHUqVM1derUigsMAAAAAIDr5NBruAEAAAAAqKpIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMAEJNwAAkCSFh4fLYrEU2kaMGCFJys3N1YgRIxQQECAfHx/16dNHmZmZDo4aAADnRcINAAAkSTt37tTJkydt28aNGyVJDz30kCRp7NixWrt2rVauXKmtW7fqxIkT6t27tyNDBgDAqbk5OgAAAOAcateubfd65syZatCggTp27Kjs7GwtWbJEy5cvV3R0tCQpOTlZTZo00fbt23XnnXc6ImQAAJwaK9wAAKCQixcv6p133tHgwYNlsViUlpamS5cuKSYmxtYmMjJSYWFhSk1NLbYfq9WqnJwcuw0AgBsFK9wAAKCQ1atXKysrSwMHDpQkZWRkyMPDQ/7+/nbtAgMDlZGRUWw/CQkJmjJliomRAhUrfPzH5drf0Zndy7U/AM6FFW4AAFDIkiVL1K1bN4WEhFxXP/Hx8crOzrZt6enp5RQhAADOjxVuAABg59ixY9q0aZM+/PBDW1lQUJAuXryorKwsu1XuzMxMBQUFFduXp6enPD09zQwXAACnxQo3AACwk5ycrDp16qh79/+d6tq6dWu5u7srJSXFVrZ//34dP35cUVFRjggTAACnxwo3AACwyc/PV3JysuLi4uTm9r/DBD8/Pw0ZMkTjxo1TzZo15evrq1GjRikqKoo7lAMAUAwSbgAAYLNp0yYdP35cgwcPLlQ3Z84cubi4qE+fPrJarYqNjdWCBQscECUAAJUDCTcAALDp2rWrDMMoss7Ly0uJiYlKTEys4KgAAKicuIYbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMIGbIz88PDxcx44dK1T+1FNPKTExUbm5uXrmmWe0YsUKWa1WxcbGasGCBQoMDHRAtEDVFj7+Y1P7Pzqzu6n9AwAAAM7GoSvcO3fu1MmTJ23bxo0bJUkPPfSQJGns2LFau3atVq5cqa1bt+rEiRPq3bu3I0MGAAAAAKBEHLrCXbt2bbvXM2fOVIMGDdSxY0dlZ2dryZIlWr58uaKjoyVJycnJatKkibZv364777zTESEDAAAAAFAiTnMN98WLF/XOO+9o8ODBslgsSktL06VLlxQTE2NrExkZqbCwMKWmpjowUgAAAAAArs2hK9x/tHr1amVlZWngwIGSpIyMDHl4eMjf39+uXWBgoDIyMortx2q1ymq12l7n5OSYES4AAAAAAFflNCvcS5YsUbdu3RQSEnJd/SQkJMjPz8+2hYaGllOEAAAAAACUnFMk3MeOHdOmTZv0+OOP28qCgoJ08eJFZWVl2bXNzMxUUFBQsX3Fx8crOzvbtqWnp5sVNgAAAAAAxXKKhDs5OVl16tRR9+7/e2xQ69at5e7urpSUFFvZ/v37dfz4cUVFRRXbl6enp3x9fe02AAAAAAAqmsOv4c7Pz1dycrLi4uLk5va/cPz8/DRkyBCNGzdONWvWlK+vr0aNGqWoqCjuUA4AAAAAcHoOT7g3bdqk48ePa/DgwYXq5syZIxcXF/Xp00dWq1WxsbFasGCBA6JEVRE+/mNHhwAAAADgBuHwhLtr164yDKPIOi8vLyUmJioxMbGCowIAAICjlPcfyI/O7H7tRgBgAqe4hhsAAAAAgKqGhBsAAAAAABOQcAMAAAAAYAISbgAAAAAATEDCDQAAAACACUi4AQAAAAAwAQk3AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAAAAABOQcAMAAAAAYAISbgAAAAAATEDCDQAAAACACUi4AQAAAAAwAQk3AACw+emnn/Too48qICBA3t7eatGihXbt2mWrNwxDEydOVHBwsLy9vRUTE6ODBw86MGIAAJwXCTcAAJAk/frrr2rfvr3c3d21bt06fffdd3r11VdVo0YNW5vZs2dr3rx5WrhwoXbs2KFq1aopNjZWubm5DowcAADn5OboAAAAgHOYNWuWQkNDlZycbCuLiIiw/dswDM2dO1cvvfSSevbsKUl6++23FRgYqNWrV6tv374VHjMAAM6MFW4AACBJWrNmjdq0aaOHHnpIderUUatWrbR48WJb/ZEjR5SRkaGYmBhbmZ+fn9q1a6fU1NQi+7RarcrJybHbAAC4UZBwAwAASdKPP/6opKQkNWrUSBs2bNCTTz6p0aNHa9myZZKkjIwMSVJgYKDd+wIDA211V0pISJCfn59tCw0NNXcnAABwIiTcAABAkpSfn6/bb79dM2bMUKtWrTRs2DANHTpUCxcuLHOf8fHxys7Otm3p6enlGDEAAM6NhBsAAEiSgoOD1bRpU7uyJk2a6Pjx45KkoKAgSVJmZqZdm8zMTFvdlTw9PeXr62u3AQBwoyDhBgAAkqT27dtr//79dmUHDhxQvXr1JF2+gVpQUJBSUlJs9Tk5OdqxY4eioqIqNFYAACoD7lIOAAAkSWPHjtVdd92lGTNm6OGHH9aXX36pRYsWadGiRZIki8WiMWPGaNq0aWrUqJEiIiI0YcIEhYSEqFevXo4NHgAAJ0TCDQAAJElt27bVqlWrFB8fr6lTpyoiIkJz585V//79bW2ef/55nT9/XsOGDVNWVpY6dOig9evXy8vLy4GRAwDgnEi4AQCAzf3336/777+/2HqLxaKpU6dq6tSpFRgVAACVE9dwAwAAAABgAhJuAAAAAABMQMINAAAAAIAJSLgBAAAAADABCTcAAAAAACYg4QYAAAAAwAQk3AAAAAAAmICEGwAAAAAAE5BwAwAAAABgAocn3D/99JMeffRRBQQEyNvbWy1atNCuXbts9YZhaOLEiQoODpa3t7diYmJ08OBBB0YMAAAAAMC1OTTh/vXXX9W+fXu5u7tr3bp1+u677/Tqq6+qRo0atjazZ8/WvHnztHDhQu3YsUPVqlVTbGyscnNzHRg5AAAAAABX5+bID581a5ZCQ0OVnJxsK4uIiLD92zAMzZ07Vy+99JJ69uwpSXr77bcVGBio1atXq2/fvhUeMwAAAAAAJeHQFe41a9aoTZs2euihh1SnTh21atVKixcvttUfOXJEGRkZiomJsZX5+fmpXbt2Sk1NdUTIAAAAAACUiEMT7h9//FFJSUlq1KiRNmzYoCeffFKjR4/WsmXLJEkZGRmSpMDAQLv3BQYG2uquZLValZOTY7cBAAAAAFDRHHpKeX5
+vtq0aaMZM2ZIklq1aqV9+/Zp4cKFiouLK1OfCQkJmjJlSnmGCQAAAABAqTl0hTs4OFhNmza1K2vSpImOHz8uSQoKCpIkZWZm2rXJzMy01V0pPj5e2dnZti09Pd2EyAEAAAAAuDqHJtzt27fX/v377coOHDigevXqSbp8A7WgoCClpKTY6nNycrRjxw5FRUUV2aenp6d8fX3tNgAAAAAAKppDTykfO3as7rrrLs2YMUMPP/ywvvzySy1atEiLFi2SJFksFo0ZM0bTpk1To0aNFBERoQkTJigkJES9evVyZOgAAAAAAFyVQxPutm3batWqVYqPj9fUqVMVERGhuXPnqn///rY2zz//vM6fP69hw4YpKytLHTp00Pr16+Xl5eXAyAEAACq38PEfOzoEAKjyHJpwS9L999+v+++/v9h6i8WiqVOnaurUqRUYFQAAAAAA18eh13ADAAAAAFBVkXADAAAAAGACEm4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATkHADAABJ0uTJk2WxWOy2yMhIW31ubq5GjBihgIAA+fj4qE+fPsrMzHRgxAAAODcSbgAAYNOsWTOdPHnStn3++ee2urFjx2rt2rVauXKltm7dqhMnTqh3794OjBYAAOfm5ugAAACA83Bzc1NQUFCh8uzsbC1ZskTLly9XdHS0JCk5OVlNmjTR9u3bdeedd1Z0qAAAOD1WuAEAgM3BgwcVEhKi+vXrq3///jp+/LgkKS0tTZcuXVJMTIytbWRkpMLCwpSamuqocAEAcGqscAMAAElSu3bttHTpUjVu3FgnT57UlClTdPfdd2vfvn3KyMiQh4eH/P397d4TGBiojIyMYvu0Wq2yWq221zk5OWaFDwCA0yHhBgAAkqRu3brZ/t2yZUu1a9dO9erV0/vvvy9vb+8y9ZmQkKApU6aUV4gAAFQqnFIOAACK5O/vr1tuuUWHDh1SUFCQLl68qKysLLs2mZmZRV7zXSA+Pl7Z2dm2LT093eSoAQBwHiTcAACgSOfOndPhw4cVHBys1q1by93dXSkpKbb6/fv36/jx44qKiiq2D09PT/n6+tptAADcKDilHAAASJKeffZZ9ejRQ/Xq1dOJEyc0adIkubq6ql+/fvLz89OQIUM0btw41axZU76+vho1apSioqK4QzkAAMUg4QYAAJKk//73v+rXr59++eUX1a5dWx06dND27dtVu3ZtSdKcOXPk4uKiPn36yGq1KjY2VgsWLHBw1AAAOC8SbgAAIElasWLFVeu9vLyUmJioxMTECooIAIDKjWu4AQAAAAAwAQk3AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAAAAABOQcAMAAAAAYAISbgAAAAAATODQhHvy5MmyWCx2W2RkpK0+NzdXI0aMUEBAgHx8fNSnTx9lZmY6MGIAAAAAAErG4SvczZo108mTJ23b559/bqsbO3as1q5dq5UrV2rr1q06ceKEevfu7cBoAQAAAAAoGTeHB+DmpqCgoELl2dnZWrJkiZYvX67o6GhJUnJyspo0aaLt27frzjvvrOhQAQAAAAAoMYevcB88eFAhISGqX7+++vfvr+PHj0uS0tLSdOnSJcXExNjaRkZGKiwsTKmpqcX2Z7ValZOTY7cBAAAAAFDRHJpwt2vXTkuXLtX69euVlJSkI0eO6O6779bZs2eVkZEhDw8P+fv7270nMDBQGRkZxfaZkJAgPz8/2xYaGmryXgAAAAAAUJhDTynv1q2b7d8tW7ZUu3btVK9ePb3//vvy9vYuU5/x8fEaN26c7XVOTg5JNwAAAACgwjn8lPI/8vf31y233KJDhw4pKChIFy9eVFZWll2bzMzMIq/5LuDp6SlfX1+7DQAAAACAiuZUCfe5c+d0+PBhBQcHq3Xr1nJ3d1dKSoqtfv/+/Tp+/LiioqIcGCUAAAAAANfm0FPKn332WfXo0UP16tXTiRMnNGnSJLm6uqpfv37y8/PTkCFDNG7cONWsWVO+vr4aNWqUoqKiuEM5AAAAAMDpOTTh/u9//6t+/frpl19+Ue3atdWhQwdt375dtWvXliTNmTNHLi4u6tOnj6xWq2JjY7VgwQJHhgwAAAAAQImUKeGuX7++du7cqYCAALvyrKws3X777frxxx9L1M+KFSuuWu/l5aXExEQlJiaWJUwAAAAAABymTAn30aNHlZeXV6jcarXqp59+uu6gAAAAgPISPv5jR4cA4AZVqoR7zZo1tn9v2LBBfn5+ttd5eXlKSUlReHh4uQUHAAAAAEBlVaqEu1evXpIki8WiuLg4uzp3d3eFh4fr1VdfLbfgAAAAAACorEqVcOfn50uSIiIitHPnTtWqVcuUoAAAAAAAqOzKdA33kSNHyjsOAAAAAACqlDI/FiwlJUUpKSk6deqUbeW7wFtvvXXdgQEAAAAAUJmVKeGeMmWKpk6dqjZt2ig4OFgWi6W84wIAAAAAoFIrU8K9cOFCLV26VAMGDCjveABUUZX9kSxHZ3Z3dAgAAACoZFzK8qaLFy/qrrvuKu9YAAAAAACoMsqUcD/++ONavnx5eccCAAAAAECVUaZTynNzc7Vo0SJt2rRJLVu2lLu7u139a6+9Vi7BAQAAAABQWZUp4f7mm2902223SZL27dtnV8cN1AAAAAAAKGPCvXnz5vKOAwAAAACAKqVM13ADAAAAAICrK9MK97333nvVU8c//fTTMgcEAAAAAEBVUKaEu+D67QKXLl3Snj17tG/fPsXFxZVHXAAAAPiD8PEfOzoEAEAplSnhnjNnTpHlkydP1rlz564rIAAAAAAAqoJyvYb70Ucf1VtvvVWeXQIAAAAAUCmVa8KdmpoqLy+v8uwSAAAAAIBKqUynlPfu3dvutWEYOnnypHbt2qUJEyaUS2AAAAAAAFRmZVrh9vPzs9tq1qypTp066ZNPPtGkSZPKO0YAAOAAM2fOlMVi0ZgxY2xlubm5GjFihAICAuTj46M+ffooMzPTcUECAODEyrTCnZycXN5xAAAAJ7Jz5069+eabatmypV352LFj9fHHH2vlypXy8/PTyJEj1bt3b23bts1BkQIA4LzKlHAXSEtL0/fffy9JatasmVq1alUuQQEAAMc5d+6c+vfvr8WLF2vatGm28uzsbC1ZskTLly9XdHS0pMt/hG/SpIm2b9+uO++801EhAwDglMp0SvmpU6cUHR2ttm3bavTo0Ro9erRat26tzp076/Tp0+UdIwAAqEAjRoxQ9+7dFRMTY1eelpamS5cu2ZVHRkYqLCxMqampFR0mAABOr0wJ96hRo3T27Fl9++23OnPmjM6cOaN9+/YpJydHo0ePLu8YAQBABVmxYoV2796thISEQnUZGRny8PCQv7+/XXlgYKAyMjKK7M9qtSonJ8duAwDgRlGmU8rXr1+vTZs2qU
mTJraypk2bKjExUV27di234AAAQMVJT0/X008/rY0bN5bbYz4TEhI0ZcqUcukLAIDKpkwr3Pn5+XJ3dy9U7u7urvz8/OsOCgAAVLy0tDSdOnVKt99+u9zc3OTm5qatW7dq3rx5cnNzU2BgoC5evKisrCy792VmZiooKKjIPuPj45WdnW3b0tPTK2BPAABwDmVKuKOjo/X000/rxIkTtrKffvpJY8eOVefOncstOAAAUHE6d+6svXv3as+ePbatTZs26t+/v+3f7u7uSklJsb1n//79On78uKKioors09PTU76+vnYbAAA3ijKdUv7GG2/ogQceUHh4uEJDQyVdPg2tefPmeuedd8o1QAAAUDGqV6+u5s2b25VVq1ZNAQEBtvIhQ4Zo3Lhxqlmzpnx9fTVq1ChFRUVxh3IAAIpQpoQ7NDRUu3fv1qZNm/TDDz9Ikpo0aVLobqYAAKBqmTNnjlxcXNSnTx9ZrVbFxsZqwYIFjg4LAACnVKqE+9NPP9XIkSO1fft2+fr6qkuXLurSpYuky8/mbNasmRYuXKi7777blGABAEDF2rJli91rLy8vJSYmKjEx0TEBAQBQiZTqGu65c+dq6NChRV5/5efnp+HDh+u1114rt+AAAAAAAKisSpVwf/3117rvvvuKre/atavS0tKuOygAAAAAACq7UiXcmZmZRT4OrICbm5tOnz593UEBAAAAAFDZlSrhvvnmm7Vv375i67/55hsFBweXKZCZM2fKYrFozJgxtrLc3FyNGDFCAQEB8vHxUZ8+fZSZmVmm/gEAAAAAqEilSrj/9Kc/acKECcrNzS1U99tvv2nSpEm6//77Sx3Ezp079eabb6ply5Z25WPHjtXatWu1cuVKbd26VSdOnFDv3r1L3T8AAAAAABWtVHcpf+mll/Thhx/qlltu0ciRI9W4cWNJ0g8//KDExETl5eXpxRdfLFUA586dU//+/bV48WJNmzbNVp6dna0lS5Zo+fLlio6OliQlJyerSZMm2r59O8/7BAAAAAA4tVIl3IGBgfriiy/05JNPKj4+XoZhSJIsFotiY2OVmJiowMDAUgUwYsQIde/eXTExMXYJd1pami5dumT3bO/IyEiFhYUpNTW12ITbarXKarXaXufk5JQqHgAAAAAAykOpEm5Jqlevnj755BP9+uuvOnTokAzDUKNGjVSjRo1Sf/iKFSu0e/du7dy5s1BdRkaGPDw85O/vb1ceGBiojIyMYvtMSEjQlClTSh0LAAAAAADlqdQJd4EaNWqobdu2Zf7g9PR0Pf3009q4caO8vLzK3M+V4uPjNW7cONvrnJwchYaGllv/AAAAQHkJH/9xufV1dGb3cusLQPko1U3TylNaWppOnTql22+/XW5ubnJzc9PWrVs1b948ubm5KTAwUBcvXlRWVpbd+zIzMxUUFFRsv56envL19bXbAAAAAACoaGVe4b5enTt31t69e+3KBg0apMjISL3wwgsKDQ2Vu7u7UlJS1KdPH0nS/v37dfz4cUVFRTkiZAAAAAAASsxhCXf16tXVvHlzu7Jq1aopICDAVj5kyBCNGzdONWvWlK+vr0aNGqWoqCjuUA4AAAAAcHoOS7hLYs6cOXJxcVGfPn1ktVoVGxurBQsWODosmKg8r2MCypPZ302uuwMAAKh6nCrh3rJli91rLy8vJSYmKjEx0TEBAQAAAABQRg67aRoAAAAAAFUZCTcAAAAAACYg4QYAAAAAwAQk3AAAAAAAmICEGwAAAAAAE5BwAwAAAABgAhJuAAAAAABMQMINAAAAAIAJSLgBAAAAADABCTcAAAAAACYg4QYAAAAAwAQk3AAAAAAAmICEGwAAAAAAE5BwAwAAAABgAjdHBwAAAFBVhY//2NEhAAAciBVuAAAAAABMQMINAAAAAIAJSLgBAAAAADABCTcAAAAAACbgpmkAAAAACinvm/4dndm9XPsDKgNWuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgJumAQAASVJSUpKSkpJ09OhRSVKzZs00ceJEdevWTZKUm5urZ555RitWrJDValVsbKwWLFigwMBAB0YNoEB53+QMwPVjhRsAAEiS6tatq5kzZyotLU27du1SdHS0evbsqW+//VaSNHbsWK1du1YrV67U1q1bdeLECfXu3dvBUQMA4LxY4QYAAJKkHj162L2ePn26kpKStH37dtWtW1dLlizR8uXLFR0dLUlKTk5WkyZNtH37dt15552OCBkAAKfGCjcAACgkLy9PK1as0Pnz5xUVFaW0tDRdunRJMTExtjaRkZEKCwtTampqsf1YrVbl5OTYbQAA3ChIuAEAgM3evXvl4+MjT09PPfHEE1q1apWaNm2qjIwMeXh4yN/f3659YGCgMjIyiu0vISFBfn5+ti00NNTkPQAAwHmQcAMAAJvGjRtrz5492rFjh5588knFxcXpu+++K3N/8fHxys7Otm3p6enlGC0AAM6Na7gBAICNh4eHGjZsKElq3bq1du7cqddff12PPPKILl68qKysLLtV7szMTAUFBRXbn6enpzw9Pc0OGwAAp8QKNwAAKFZ+fr6sVqtat24td3d3paSk2Or279+v48ePKyoqyoERAgDgvFjhBgAAki6f/t2tWzeFhYXp7NmzWr58ubZs2aINGzbIz89PQ4YM0bhx41SzZk35+vpq1KhRioqK4g7lAAAUg4QbAABIkk6dOqXHHntMJ0+elJ+fn1q2bKkNGzaoS5cukqQ5c+bIxcVFffr0kdVqVWxsrBYsWODgqAEAcF4OPaU8KSlJLVu2lK+vr3x9fRUVFaV169bZ6nNzczVixAgFBATIx8dHffr0UWZmpgMjBgCg6lqyZImOHj0qq9WqU6dOadOmTbZkW5K8vLyUmJioM2fO6Pz58/rwww+vev02AAA3Oocm3HXr1tXMmTOVlpamXbt2KTo6Wj179tS3334rSRo7dqzWrl2rlStXauvWrTpx4oR69+7tyJABAAAAACgRh55S3qNHD7vX06dPV1JSkrZv3666detqyZIlWr58uaKjoyVJycnJatKkibZv3871YgAAAAAAp+Y0dynPy8vTihUrdP78eUVFRSktLU2XLl1STEyMrU1kZKTCwsKUmprqwEgBAAAAALg2h980be/evYqKilJubq58fHy0atUqNW3aVHv27JGHh4fdsz4lKTAwUBkZGcX2Z7VaZbVaba9zcnLMCh0AAAAAgGI5POFu3Lix9uzZo+zsbH3wwQeKi4vT1q1by9xfQkKCpkyZUo4R4o/Cx3/s6BAAAAAAoFJw+CnlHh4eatiwoVq3bq2EhATdeuutev311xUUFKSLFy8qKyvLrn1mZuZV74gaHx+v7Oxs25aenm7yHgAAAAAAUJjDE+4r5efny2q1qnXr1nJ3d1dKSoqtbv/+/Tp+/LiioqKKfb+np6ftMWMFGwAAAAAAFc2hp5THx8erW7duCgsL09mzZ7V8+XJt2bJFGzZskJ+fn4YMGaJx48apZs2a8vX11ahRoxQVFcUdygEAAAAATs+hCfepU6f02
GOP6eTJk/Lz81PLli21YcMGdenSRZI0Z84cubi4qE+fPrJarYqNjdWCBQscGTIAAAAAACXi0IR7yZIlV6338vJSYmKiEhMTKygiAAAAAADKh9Ndww0AAAAAQFVAwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATOPQ53ACAy8LHf2xa30dndjetbwAAABSPFW4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYwM3RAQAAADiL8PEfOzoEAEAVwgo3AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAAAAABOQcAMAAAAAYAISbgAAIElKSEhQ27ZtVb16ddWpU0e9evXS/v377drk5uZqxIgRCggIkI+Pj/r06aPMzEwHRQwAgHMj4QYAAJKkrVu3asSIEdq+fbs2btyoS5cuqWvXrjp//rytzdixY7V27VqtXLlSW7du1YkTJ9S7d28HRg0AgPPiOdwAAECStH79ervXS5cuVZ06dZSWlqZ77rlH2dnZWrJkiZYvX67o6GhJUnJyspo0aaLt27frzjvvdETYAAA4LVa4AQBAkbKzsyVJNWvWlCSlpaXp0qVLiomJsbWJjIxUWFiYUlNTi+zDarUqJyfHbgMA4EbBCjcAACgkPz9fY8aMUfv27dW8eXNJUkZGhjw8POTv72/XNjAwUBkZGUX2k5CQoClTppgdLoBKIHz8x+XW19GZ3cutL8BMrHADAIBCRowYoX379mnFihXX1U98fLyys7NtW3p6ejlFCACA82OFGwAA2Bk5cqT+9a9/6bPPPlPdunVt5UFBQbp48aKysrLsVrkzMzMVFBRUZF+enp7y9PQ0O2QAAJySQ1e4efwIAADOwzAMjRw5UqtWrdKnn36qiIgIu/rWrVvL3d1dKSkptrL9+/fr+PHjioqKquhwAQBweg5NuHn8CAAAzmPEiBF65513tHz5clWvXl0ZGRnKyMjQb7/9Jkny8/PTkCFDNG7cOG3evFlpaWkaNGiQoqKiuEM5AABFcOgp5Tx+BAAA55GUlCRJ6tSpk115cnKyBg4cKEmaM2eOXFxc1KdPH1mtVsXGxmrBggUVHCkAAJWDU13DXdrHjxSVcFutVlmtVttrHj8CAEDJGIZxzTZeXl5KTExUYmJiBUQEAEDl5jR3KS/Px4/4+fnZttDQULNDBwAAAACgEKdJuHn8CAAAAACgKnGKU8p5/AgAAAAAoKpx6Ao3jx8BAAAAAFRVDl3hHjFihJYvX66PPvrI9vgR6fJjR7y9ve0eP1KzZk35+vpq1KhRPH4EAAAAAOD0HJpw8/gRAAAAAEBV5dCEm8ePAAAAAACqKqe5SzkAAAAAAFUJCTcAAAAAACYg4QYAAAAAwAQk3AAAAAAAmICEGwAAAAAAE5BwAwAAAABgAoc+FgwAAAAASit8/Mfl2t/Rmd3LtT+gACvcAAAAAACYgIQbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg0AAAAAgAncHB0AAADA9Qgf/7GjQwAAoEiscAMAAAAAYAISbgAAAAAATEDCDQAAAACACUi4AQAAAAAwATdNq2K4cQwAAAAAOAdWuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAABIkj777DP16NFDISEhslgsWr16tV29YRiaOHGigoOD5e3trZiYGB08eNAxwQIAUAmQcAMAAEnS+fPndeuttyoxMbHI+tmzZ2vevHlauHChduzYoWrVqik2Nla5ubkVHCkAAJUDjwUDAACSpG7duqlbt25F1hmGoblz5+qll15Sz549JUlvv/22AgMDtXr1avXt27ciQwUAoFJghRsAAFzTkSNHlJGRoZiYGFuZn5+f2rVrp9TUVAdGBgCA83Jows21YgAAVA4ZGRmSpMDAQLvywMBAW11RrFarcnJy7DYAAG4UDk24uVYMAICqLSEhQX5+frYtNDTU0SEBAFBhHJpwd+vWTdOmTdOf//znQnVXXivWsmVLvf322zpx4kShlXAAAGCuoKAgSVJmZqZdeWZmpq2uKPHx8crOzrZt6enppsYJAIAzcdpruMt6rRinrgEAUP4iIiIUFBSklJQUW1lOTo527NihqKioYt/n6ekpX19fuw0AgBuF096lvKzXiiUkJGjKlCmmxgYAQFV07tw5HTp0yPb6yJEj2rNnj2rWrKmwsDCNGTNG06ZNU6NGjRQREaEJEyYoJCREvXr1clzQAAA4MadNuMsqPj5e48aNs73OycnhejEAAEpg165duvfee22vC+bTuLg4LV26VM8//7zOnz+vYcOGKSsrSx06dND69evl5eXlqJABAHBqTptw//FaseDgYFt5ZmambrvttmLf5+npKU9PT7PDAwCgyunUqZMMwyi23mKxaOrUqZo6dWoFRgUAQOXltNdwl/VaMQAAAAAAnIFDV7i5VgwAAAAAUFU5NOHmWjEAMF/4+I9N7f/ozO6m9g8AAFBZOTTh5loxAAAAAEBV5bQ3TQMAAACAilCeZ4Nx5hf+yGlvmgYAAAAAQGVGwg0AAAAAgAlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgMeCAQCuS3k+SqUoPF4FAABUVqxwAwAAAABgAhJuAAAAAABMQMINAAAAAIAJSLgBAAAAADABCTcAAAAAACYg4QYAAAAAwAQ8FgwAAAAAbhDl+ThPHt15baxwAwAAAABgAla4AQBOrTz/Eu8I/PUfAIAbFyvcAAAAAACYgIQbAAAAAAATcEo5AAAAAJST8r4UikuTKjdWuAEAAAAAMAEr3A5Q2W8ABAAAAAC4Nla4AQAAAAAwAQk3AAAAAAAmIOEGAAAAAMAEXMMNAICJynLfjnzrBRMiAQAAFY0VbgAAAAAATEDCDQAAAACACUi4AQAAAAAwAddwAwAAAABKrSz3KakoR2d2d3QIkljhBgAAAADAFCTcAAAAAACYgIQbAAAAAAATkHADAAAAAGCCSnHTtMTERP31r39VRkaGbr31Vs2fP1933HGHaZ/nzBf/AwDgDCp6bgaAGxW5SeXm9Cvc7733nsaNG6dJkyZp9+7duvXWWxUbG6tTp045OjQAAG5IzM0AAJSM0yfcr732moYOHapBgwapadOmWrhwoW666Sa99dZbjg4NAIAbEnMzAAAl49QJ98WLF5WWlqaYmBhbmYuLi2JiYpSamurAyAAAuDExNwMAUHJOfQ33zz//rLy8PAUGBtqVBwYG6ocffijyPVarVVar1fY6OztbkpSTk1Piz823XihDtAAAlI+CecgwDAdHUlhp
5+bymJevhXkbAHCl8pxnCvoqy7zs1Al3WSQkJGjKlCmFykNDQx0QDQAAZffLL7/Iz8/P0WFcF+ZlAIAj+M0t/z7Pnj1b6nnZqRPuWrVqydXVVZmZmXblmZmZCgoKKvI98fHxGjdunO11fn6+zpw5o4CAAFkslkLtc3JyFBoaqvT0dPn6+pbvDtygGNPyxXiWP8a0fDGe5S87O1thYWGqWbOmo0MppLRzc2nn5cqO34drY4xKhnG6Nsbo2hijayvJGBmGobNnzyokJKTU/Tt1wu3h4aHWrVsrJSVFvXr1knR5ok5JSdHIkSOLfI+np6c8PT3tyvz9/a/5Wb6+vnwJyxljWr4Yz/LHmJYvxrP8ubg4361WSjs3l3Veruz4fbg2xqhkGKdrY4yujTG6tmuNUVnPOHPqhFuSxo0bp7i4OLVp00Z33HGH5s6dq/Pnz2vQoEGODg0AgBsSczMAACXj9An3I488otOnT2vixInKyMjQbbfdpvXr1xe6WQsAAKgYzM0AAJSM0yfckjRy5MhiTyG/Xp6enpo0aVKh091Qdoxp+WI8yx9jWr4Yz/JXGcbUzLm5MqsMPztHY4xKhnG6Nsbo2hijazN7jCyGMz5zBAAAAACASs757sYCAAAAAEAVQMINAAAAAIAJSLgBAAAAADDBDZNwT548WRaLxW6LjIy01efm5mrEiBEKCAiQj4+P+vTpo8zMTAdG7Fw+++wz9ejRQyEhIbJYLFq9erVdvWEYmjhxooKDg+Xt7a2YmBgdPHjQrs2ZM2fUv39/+fr6yt/fX0OGDNG5c+cqcC+cy7XGdODAgYW+s/fdd59dG8b0fxISEtS2bVtVr15dderUUa9evbR//367NiX5PT9+/Li6d++um266SXXq1NFzzz2n33//vSJ3xSmUZDw7depU6Dv6xBNP2LVhPP8nKSlJLVu2tD3nMyoqSuvWrbPV8/2svGbOnCmLxaIxY8bYyjiuuOynn37So48+qoCAAHl7e6tFixbatWuXrb4kxw9VWV5eniZMmKCIiAh5e3urQYMGevnll/XHWyzdaGPEMee1XW2MLl26pBdeeEEtWrRQtWrVFBISoscee0wnTpyw66Oqj5F07e/SHz3xxBOyWCyaO3euXXl5jNMNk3BLUrNmzXTy5Enb9vnnn9vqxo4dq7Vr12rlypXaunWrTpw4od69ezswWudy/vx53XrrrUpMTCyyfvbs2Zo3b54WLlyoHTt2qFq1aoqNjVVubq6tTf/+/fXtt99q48aN+te//qXPPvtMw4YNq6hdcDrXGlNJuu++++y+s//4xz/s6hnT/9m6datGjBih7du3a+PGjbp06ZK6du2q8+fP29pc6/c8Ly9P3bt318WLF/XFF19o2bJlWrp0qSZOnOiIXXKokoynJA0dOtTuOzp79mxbHeNpr27dupo5c6bS0tK0a9cuRUdHq2fPnvr2228l8f2srHbu3Kk333xTLVu2tCvnuEL69ddf1b59e7m7u2vdunX67rvv9Oqrr6pGjRq2NiU5fqjKZs2apaSkJL3xxhv6/vvvNWvWLM2ePVvz58+3tbnRxohjzmu72hhduHBBu3fv1oQJE7R79259+OGH2r9/vx544AG7dlV9jKSSHWtL0qpVq7R9+3aFhIQUqiuXcTJuEJMmTTJuvfXWIuuysrIMd3d3Y+XKlbay77//3pBkpKamVlCElYckY9WqVbbX+fn5RlBQkPHXv/7VVpaVlWV4enoa//jHPwzDMIzvvvvOkGTs3LnT1mbdunWGxWIxfvrppwqL3VldOaaGYRhxcXFGz549i30PY3p1p06dMiQZW7duNQyjZL/nn3zyieHi4mJkZGTY2iQlJRm+vr6G1Wqt2B1wMleOp2EYRseOHY2nn3662PcwntdWo0YN429/+xvfz0rq7NmzRqNGjYyNGzfa/T5wXHHZCy+8YHTo0KHY+pIcP1R13bt3NwYPHmxX1rt3b6N///6GYTBGHHNeW1HHkFf68ssvDUnGsWPHDMO48cbIMIofp//+97/GzTffbOzbt8+oV6+eMWfOHFtdeY3TDbXCffDgQYWEhKh+/frq37+/jh8/LklKS0vTpUuXFBMTY2sbGRmpsLAwpaamOircSuPIkSPKyMiwGz8/Pz+1a9fONn6pqany9/dXmzZtbG1iYmLk4uKiHTt2VHjMlcWWLVtUp04dNW7cWE8++aR++eUXWx1jenXZ2dmSpJo1a0oq2e95amqqWrRoocDAQFub2NhY5eTk2FYhb1RXjmeBd999V7Vq1VLz5s0VHx+vCxcu2OoYz+Ll5eVpxYoVOn/+vKKiovh+VlIjRoxQ9+7d7X5uEscVBdasWaM2bdrooYceUp06ddSqVSstXrzYVl+S44eq7q677lJKSooOHDggSfr666/1+eefq1u3bpIYoytxzFk22dnZslgs8vf3l8QYFcjPz9eAAQP03HPPqVmzZoXqy2uc3Mol2kqgXbt2Wrp0qRo3bqyTJ09qypQpuvvuu7Vv3z5lZGTIw8PD9iUsEBgYqIyMDMcEXIkUjNEfDwILXhfUZWRkqE6dOnb1bm5uqlmzJmNcjPvuu0+9e/dWRESEDh8+rL/85S/q1q2bUlNT5erqypheRX5+vsaMGaP27durefPmklSi3/OMjIwiv8cFdTeqosZTkv7v//5P9erVU0hIiL755hu98MIL2r9/vz788ENJjGdR9u7dq6ioKOXm5srHx0erVq1S06ZNtWfPHr6flcyKFSu0e/du7dy5s1AdxxWX/fjjj0pKStK4ceP0l7/8RTt37tTo0aPl4eGhuLi4Eh0/VHXjx49XTk6OIiMj5erqqry8PE2fPl39+/eXVLJjrBsJx5yll5ubqxdeeEH9+vWTr6+vJMaowKxZs+Tm5qbRo0cXWV9e43TDJNwFfymUpJYtW6pdu3aqV6+e3n//fXl7ezswMqBoffv2tf27RYsWatmypRo0aKAtW7aoc+fODozM+Y0YMUL79u2zu08Dyq648fzjNUwtWrRQcHCwOnfurMOHD6tBgwYVHWal0LhxY+3Zs0fZ2dn64IMPFBcXp61btzo6LJRSenq6nn76aW3cuFFeXl6ODsdp5efnq02bNpoxY4YkqVWrVtq3b58WLlyouLg4B0fnHN5//329++67Wr58uZo1a6Y9e/ZozJgxCgkJYYxw3S5duqSHH35YhmEoKSnJ0eE4lbS0NL3++uvavXu3LBaLqZ91Q51S/kf+/v665ZZbdOjQIQUFBenixYvKysqya5OZmamgoCDHBFiJFIzRlXdf/eP4BQUF6dSpU3b1v//+u86cOcMYl1D9+vVVq1YtHTp0SBJjWpyRI0fqX//6lzZv3qy6devaykvyex4UFFTk97ig7kZU3HgWpV27dpJk9x1lPO15eHioYcOGat26tRISEnTrrbfq9ddf5/tZyaSlpenUqVO6/fbb5ebmJjc3N23dulXz5s2Tm5ubAgMDOa6QFBwcrKZNm9qVNWnSxHZJX0mOH6q65557TuPHj1ffvn3VokULDRgwQGPHjlVCQoIkxuhKHHO
WXEGyfezYMW3cuNG2ui0xRpL0n//8R6dOnVJYWJjt//Fjx47pmWeeUXh4uKTyG6cbNuE+d+6cDh8+rODgYLVu3Vru7u5KSUmx1e/fv1/Hjx9XVFSUA6OsHCIiIhQUFGQ3fjk5OdqxY4dt/KKiopSVlaW0tDRbm08//VT5+fm2g3Rc3X//+1/98ssvCg4OlsSYXskwDI0cOVKrVq3Sp59+qoiICLv6kvyeR0VFae/evXb/uRZMUlceNFZ11xrPouzZs0eS7L6jjOfV5efny2q18v2sZDp37qy9e/dqz549tq1Nmzbq37+/7d8cV0jt27cv9DjBAwcOqF69epJKdvxQ1V24cEEuLvaH466ursrPz5fEGF2JY86SKUi2Dx48qE2bNikgIMCunjGSBgwYoG+++cbu//GQkBA999xz2rBhg6RyHKcy3+qtknnmmWeMLVu2GEeOHDG2bdtmxMTEGLVq1TJOnTplGIZhPPHEE0ZYWJjx6aefGrt27TKioqKMqKgoB0ftPM6ePWt89dVXxldffWVIMl577TXjq6++st3tcObMmYa/v7/x0UcfGd98843Rs2dPIyIiwvjtt99sfdx3331Gq1atjB07dhiff/650ahRI6Nfv36O2iWHu9qYnj171nj22WeN1NRU48iRI8amTZuM22+/3WjUqJGRm5tr64Mx/Z8nn3zS8PPzM7Zs2WKcPHnStl24cMHW5lq/57///rvRvHlzo2vXrsaePXuM9evXG7Vr1zbi4+MdsUsOda3xPHTokDF16lRj165dxpEjR4yPPvrIqF+/vnHPPffY+mA87Y0fP97YunWrceTIEeObb74xxo8fb1gsFuPf//63YRh8Pyu7K+/az3HF5Tsju7m5GdOnTzcOHjxovPvuu8ZNN91kvPPOO7Y2JTl+qMri4uKMm2++2fjXv/5lHDlyxPjwww+NWrVqGc8//7ytzY02RhxzXtvVxujixYvGAw88YNStW9fYs2eP3Rz+xydaVPUxMoxrf5eudOVdyg2jfMbphkm4H3nkESM4ONjw8PAwbr75ZuORRx4xDh06ZKv/7bffjKeeesqoUaOGcdNNNxl//vOfjZMnTzowYueyefNmQ1KhLS4uzjCMy49pmDBhghEYGGh4enoanTt3Nvbv32/Xxy+//GL069fP8PHxMXx9fY1BgwYZZ8+edcDeOIerjemFCxeMrl27GrVr1zbc3d2NevXqGUOHDrV7HJBhMKZ/VNRYSjKSk5NtbUrye3706FGjW7duhre3t1GrVi3jmWeeMS5dulTBe+N41xrP48ePG/fcc49Rs2ZNw9PT02jYsKHx3HPPGdnZ2Xb9MJ7/M3jwYKNevXqGh4eHUbt2baNz5862ZNsw+H5Wdlcm3BxXXLZ27VqjefPmhqenpxEZGWksWrTIrr4kxw9VWU5OjvH0008bYWFhhpeXl1G/fn3jxRdftEuMbrQx4pjz2q42RkeOHCl2Dt+8ebOtj6o+RoZx7e/SlYpKuMtjnCyGYRglXw8HAAAAAAAlccNeww0AAAAAgJlIuAEAAAAAMAEJNwAAAAAAJiDhBgAAAADABCTcAAAAAACYgIQbAAAAAAATkHADAAAAAGACEm4AAAAAAExAwg3AYQYOHKhevXo5OgwAACDmZcAMJNzADcDRE+jRo0dlsVi0Z88eh8UAAICzYF4Gbhwk3AAAAAAAmICEG7jB7du3T926dZOPj48CAwM1YMAA/fzzz7b6Tp06afTo0Xr++edVs2ZNBQUFafLkyXZ9/PDDD+rQoYO8vLzUtGlTbdq0SRaLRatXr5YkRURESJJatWoli8WiTp062b3/lVdeUXBwsAICAjRixAhdunTJzF0GAMBpMS8DVQsJN3ADy8rKUnR0tFq1aqVdu3Zp/fr1yszM1MMPP2zXbtmyZapWrZp27Nih2bNna+rUqdq4caMkKS8vT7169dJNN92kHTt2aNGiRXrxxRft3v/ll19KkjZt2qSTJ0/qww8/tNVt3rxZhw8f1ubNm7Vs2TItXbpUS5cuNXfHAQBwQszLQNXj5ugAADjOG2+8oVatWmnGjBm2srfeekuhoaE6cOCAbrnlFklSy5YtNWnSJElSo0aN9MYbbyglJUVdunTRxo0bdfjwYW3ZskVBQUGSpOnTp6tLly62PmvXri1JCggIsLUpUKNGDb3xxhtydXVVZGSkunfvrpSUFA0dOtTUfQcAwNkwLwNVDwk3cAP7+uuvtXnzZvn4+BSqO3z4sN3E/kfBwcE6deqUJGn//v0KDQ21m7DvuOOOEsfQrFkzubq62vW9d+/eUu0HAABVAfMyUPWQcAM3sHPnzqlHjx6aNWtWobrg4GDbv93d3e3qLBaL8vPzyyUGM/sGAKAyYV4Gqh4SbuAGdvvtt+uf//ynwsPD5eZWtv8OGjdurPT0dGVmZiowMFCStHPnTrs2Hh4eki5fVwYAAIrGvAxUPdw0DbhBZGdna8+ePXbbsGHDdObMGfXr1087d+7U4cOHtWHDBg0aNKjEk3CXLl3UoEEDxcXF6ZtvvtG2bdv00ksvSbr8V3FJqlOnjry9vW03f8nOzjZtPwEAqAyYl4EbAwk3cIPYsmWLWrVqZbe9/PLL2rZtm/Ly8tS1a1e1aNFCY8aMkb+/v1xcSvbfg6urq1avXq1z586pbdu2evzxx213Q/Xy8pIkubm5ad68eXrzzTcVEhKinj17mrafAABUBszLwI3BYhiG4eggAFQt27ZtU4cOHXTo0CE1aNDA0eEAAHBDY14GHIeEG8B1W7VqlXx8fNSoUSMdOnRITz/9tGrUqKHPP//c0aEBAHDDYV4GnAc3TQNw3c6ePasXXnhBx48fV61atRQTE6NXX33V0WEBAHBDYl4GnAcr3AAAAAAAmICbpgEAAAAAYAISbgAAAAAATEDCDQAAAACACUi4AQAAAAAwAQk3AAAAAAAmIOEGAAAAAMAEJNwAAAAAAJiAhBsAAAAAABOQcAMAAAAAYIL/B8G6ZsQApjvoAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, axes = plt.subplots(1, 2, figsize=(10, 5) )\n", + "axes[0].hist(NeW_Len_SciTLDR_test_Text, bins = 20 )\n", + "axes[0].set_title(\"SciTLDR_test_Text Length\")\n", + "axes[0].set_xlabel(\"Length\")\n", + "axes[0].set_ylabel(\"Count\")\n", + "axes[0].set_xlim((45 ,300))\n", + "\n", + "\n", + "axes[1].hist(NeW_Len_SciTLDR_test_Summary, bins = 20 )\n", + "axes[1].set_title(\"SciTLDR_test_Summary Length\")\n", + "axes[1].set_xlabel(\"Length\")\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "736e3953", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "d6ac8c3b", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1bd7fb1b", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 570, + "id": "1e2f6348", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA90AAAHqCAYAAAAZLi26AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABV/UlEQVR4nO3deVyU5f7/8fewIwiIC4sioJG4lqkZamWKkplL2mLHCjse7XTQXCqTykxPhnlOZhZqdUpbNMtK2456DJey0MzSNNPU3NJAywC1QIPr90c/59sIKOjczuLr+XjM48Fc9zX3fK65B655c8993zZjjBEAAAAAAHA6H1cXAAAAAACAtyJ0AwAAAABgEUI3AAAAAAAWIXQDAAAAAGARQjcAAAAAABYhdAMAAAAAYBFCNwAAAAAAFiF0AwAAAABgEUI3AAAAAAAWIXQDlejcubM6d+7s6jI8yqOPPiqbzebqMiApISFB119/vavLAHABYL6sPuZLnKuVK1fKZrPprbfecnUpqAJCN7zKpk2bdOONNyo+Pl5BQUGqX7++unXrpmeeeeac1tu5c2fZbLYz3h599FFJVQs8gwYNcnhsaGioGjVqpBtvvFFvv/22ysrKzlhHcHCwWrVqpWnTplXYvzIHDx6Un5+fbrvttkr7HDlyRMHBwerXr1+V11sdu3fvrtJrarPZtHv3bqc855YtW/Too49WeX0nPxT99NNPTnl+Z6vueADgJObLqnGH+fLPVq9erR49eqh+/foKCgpSw4YN1atXL82bN8/y5/ZW7v5P6nnz5mnatGmuLgPnyM/VBQDO8tlnn+maa65Rw4YNNWTIEEVHR2vfvn1as2aNnn76aQ0fPrxa6/vf//5n//mhhx7S3/72N/v9devWafr06XrwwQfVtGlTe3urVq2q9RyBgYH6z3/+I0n67bfftGfPHr3//vu68cYb1blzZ7377rsKCwtzeEyDBg2UlZUlSfrpp580b948jRo1SocOHdKkSZOq9Lz16tVTt27d9O677+rXX39VjRo1yvV55513VFxcfNoPGueibt26evXVVx3annzySf3www966qmnyvV1hi1btmjChAnq3LmzEhISnLJOV/K28QA4P5gvPWu+PGnBggW65ZZbdOmll2rEiBGqVauWdu3apY8//lgvvPCC/vKXv1j6/HCNefPmafPmzRo5cqSrS8G5MICXuO6660zdunXNL7/8Um5Zfn6+U59rwYIFRpJZsWJFhcvj4+NNz549T7uO9PR0ExISUuGyrKwsI8ncfPPNDu1XX321ad68uUPbb7/9ZuLj403NmjXN77//XuUxvPrqq0aSef311ytc3r17dxMeHm6Ki4urvM7x48ebc/mz0rNnTxMfH3/Wjz+TM223U50cz6FDhyyr6VycbjxVeQ8CuDAxX3rmfNmsWTPTvHlzU1JSUm6Zs7ebJygtLTW//fbbOa/H3efLyj4brVixwkgyCxYsOP9Fodr4ejm8xs6dO9W8eXNFRESUW1avXr1yba+99pouv/xy1ahRQ7Vq1dJVV13l8N96Vx6jNnbsWHXv3l0LFizQd999d9q+QUFBateunY4cOaKDBw9W+TluuOEGhYSEVPiVtIMHDyonJ0c33nijAgMD9cknn+imm25Sw4YNFRgYqLi4OI0aNUq//fZbtcdWXSUlJRo/frwuuugi+3OPGTNGJSUl9j7p6ekKCgrSt99+6/DYtLQ01apVSwcOHNCcOXN00003SZKuueYa+1cOV65cec41bt26VTfeeKMiIyMVFBSktm3b6r333nPoM2fOHNlsNn366acaPXq06tatq5CQEN1www06dOiQQ9+ysjI9+uijio2NVY0aNXTNNddoy5YtSkhI0KBBg+zrq8p4Vq9ercsvv1xBQUFq1KiRXnnllXMeLwDPxnzpmfPlzp071a5dOwUEBJRb9uftdvJY31Png5OHdc2ZM8feNmjQIIWGhmrv3r26/vrrFRoaqvr16ys7O1vSH4chdOnSRSEhIYqPjy/3Gpyc21avXq177rlHdevWVUREhO666y4dP35cBQUFuuOOO1SrVi3VqlVLY8aMkTHGYR3//ve/1aFDB9WuXVvBwcFq06ZNhccp22w2DRs2THPnzlXz5s0VGBioxYsXKyEhQX369CnXv7i4WOHh4brrrrvO+NpWxWuvvaY2bdooODhYkZGRGjBggPbt2+fQp3PnzmrRooW2bNmia665RjVq1FD9+vU1ZcqUcuvbs2ePevfurZCQENWrV0+jRo3S0qVLHbZd586d9eGHH2rPnj32ef7Ub7aVlZVp0qRJatCggYKCgtS1a1ft2LHDKWOG8xC64TXi4+O1fv16bd68+Yx9J0yYoNtvv13+/v6aOHGiJkyYoLi4OC1fvvw8VFo1t99+u4wxWrZs2Rn7npxIK/oAVZmQkBD16dNHS5cu1eHDhx2WvfHGGyotLdXAgQMl/fGV
tl9//VV33323nnnmGaWlpemZZ57RHXfcUa0xVVdZWZl69+6tf//73+rVq5eeeeYZ9e3bV0899ZRuueUWe7+nn35adevWVXp6ukpLSyVJzz33nP73v//pmWeeUWxsrK666irdc889kqQHH3xQr776ql599VWHrzuejW+++UZXXHGFvv32W40dO1ZPPvmkQkJC1LdvXy1cuLBc/+HDh2vjxo0aP3687r77br3//vsaNmyYQ5/MzExNmDBBbdu21b/+9S8lJSUpLS1Nx44ds/epynh27NihG2+8Ud26ddOTTz6pWrVqadCgQfrmm2/OacwAPBvzpWfOl/Hx8crJydEPP/xwzuv6s9LSUvXo0UNxcXGaMmWKEhISNGzYMM2ZM0fXXnut2rZtqyeeeEI1a9bUHXfcoV27dpVbx/Dhw7V9+3ZNmDBBvXv31vPPP69x48apV69eKi0t1eOPP65OnTrpX//6V7lDy55++mm1bt1aEydO1OOPPy4/Pz/ddNNN+vDDD8s9z/LlyzVq1Cjdcsstevrpp5WYmKjbbrtNixcvLrdt3n//fRUVFTnla/+TJk3SHXfcoaSkJE2dOlUjR45UTk6OrrrqKhUUFDj0/eWXX3Tttdfqkksu0ZNPPqnk5GQ98MADWrx4sb3PsWPH1KVLF3300Ue655579NBDD+mzzz7TAw884LCuhx56SJdeeqnq1Kljn+dPPb578uTJWrhwoe677z5lZmZqzZo19vcj3IiL97QDTvO///3P+Pr6Gl9fX5OSkmLGjBljli5dao4fP+7Qb/v27cbHx8fccMMNprS01GFZWVmZ/eerr77aXH311RU+l9VflzPGmK+++spIMqNGjXKoKTk52Rw6dMgcOnTIbN261dx///1G0ll9NerDDz80ksxzzz3n0H7FFVeY+vXr21+fX3/9tdxjs7KyjM1mM3v27LG3Ofvr5a+++qrx8fExn3zyiUO/WbNmGUnm008/tbctXbrUSDKPPfaY+f77701oaKjp27evw+Os+Hp5165dTcuWLR2+VlhWVmY6dOhgkpKS7G2zZ882kkxqaqrD+2zUqFHG19fXFBQUGGOMycvLM35+fuVqf/TRR40kk56eXqXxxMfHG0nm448/trcdPHjQBAYGmnvvvbdK4wfgnZgvPXO+fPHFF40kExAQYK655hozbtw488knn5TbNie/dnzqa75r1y4jycyePdvelp6ebiSZxx9/3N72yy+/mODgYGOz2cz8+fPt7Vu3bjWSzPjx4+1tJ+e2tLQ0h/dESkqKsdls5u9//7u97ffffzcNGjQo91459TU7fvy4adGihenSpYtDuyTj4+NjvvnmG4f2bdu2GUlm5syZDu29e/c2CQkJDnVV5Ezvwd27dxtfX18zadIkh/ZNmzYZPz8/h/arr77aSDKvvPKKva2kpMRER0eb/v3729uefPJJI8ksWrTI3vbbb7+Z5OTkctvuTF8vb9q0qcMhB08//bSRZDZt2nTaceP8Yk83vEa3bt2Um5ur3r17a+PGjZoyZYrS0tJUv359h6/6Llq0SGVlZXrkkUfk4+P4K+BOl+8IDQ2V9MdZUf9s69atqlu3rurWravk5GT961//Uu/evR2+LlZV3bt3V926dR2+LrZr1y6tWbNGt956q/31CQ4Oti8/duyYfvrpJ3Xo0EHGGH311VdnMbqqWbBggZo2bark5GT99NNP9luXLl0kSStWrHAYy1133aWJEyeqX79+CgoK0nPPPWdZbZJ0+PBhLV++XDfffLOOHDlir+/nn39WWlqatm/frv379zs8ZujQoQ7vsyuvvFKlpaXas2ePJCknJ0e///67/vGPfzg8rronNpKkZs2a6corr7Tfr1u3rpo0aaLvv/++2usC4D2YL+dU+zncYb7861//qiVLlqhz585avXq1/vnPf+rKK69UUlKSPvvss3Na959PfhcREaEmTZooJCREN998s729SZMmioiIqHAOGTx4sMN7on379jLGaPDgwfY2X19ftW3bttzj//ya/fLLLyosLNSVV16pL7/8stzzXH311WrWrJlD28UXX6z27dtr7ty59rbDhw9r8eLFGjhw4Dm/V9955x2VlZXp5ptvdvgsEh0draSkJIfPItIf78c/710PCAjQ5Zdf7jDuJUuWqH79+urdu7e9LSgoSEOGDKl2fXfeeafDIQcn533mevdC6IZXadeund555x398ssv+vzzz5WZmakjR47oxhtv1JYtWyT9cUyUj49PuT/a7ubo0aOSpJo1azq0JyQkaNmyZVq6dKlmzJih+vXr69ChQwoKCqr2c/j5+emWW27RJ598Yg+HJz9Q/PmrSXv37tWgQYMUGRmp0NBQ1a1bV1dffbUkqbCw8KzGVxXbt2/XN998Y//QdPJ28cUXS1K5Y/L+/e9/KzIyUhs2bND06dMrPDbRmXbs2CFjjMaNG1euxvHjx1dYY8OGDR3u16pVS9IfHzQk2cP3RRdd5NAvMjLS3reqTn2uk8938rkAXLiYL6vHXebLtLQ0LV26VAUFBfr444+VkZGhPXv26Prrr6/Wcep/FhQUVO4qIeHh4WrQoEG5wBoeHl7hHHLqfBMeHi5JiouLO+PjP/jgA11xxRUKCgpSZGSk6tatq5kzZ1b4eiUmJlY4hjvuuEOffvqpfQ5dsGCBTpw4odtvv73C/tWxfft2GWOUlJRUbq7/9ttvy73uFb1up869e/bsUePGjcv1O3Xur4ozfa6Ae+CSYfBKAQEBateundq1a6eLL75Yd955pxYsWGAPQp7g5LF2p/4BDgkJUWpqqv1+x44dddlll+nBBx/U9OnTq/08t912m5599lm9/vrruu+++/T666+rWbNmuvTSSyX9caxXt27ddPjwYT3wwANKTk5WSEiI9u/fr0GDBlXreqfVVVZWppYtW2rq1KkVLj91Mv/qq6/sk9+mTZt06623Wlbbyfok6b777lNaWlqFfU7dfr6+vhX2M6ecWMYZzudzAfBMzJdV507zZY0aNXTllVfqyiuvVJ06dTRhwgQtXrxY6enple7ZPXnOk1NVNldUZw6pzjr+/PhPPvlEvXv31lVXXaUZM2YoJiZG/v7+mj17doUnrvvzXvE/GzBggEaNGqW5c+fqwQcf1Guvvaa2bduqSZMmFfavjrKyMtlsNi1evLjC8Zz8psVJ53vuZa73DIRueL22bdtKkn788UdJUuPGjVVWVqYtW7bYJ0p39Oqrr8pms6lbt26n7deqVSvddttteu6553TfffdVuHfzdNq3b6/GjRtr3rx56tatm7755huH65du2rRJ3333nV5++WWHE8FU5YQ156px48bauHGjunbtesavhx07dkx33nmnmjVrpg4dOmjKlCm64YYb1K5dO3sfZ38dslGjRpIkf39/hw925yI+Pl7SH3vR//wf/Z9//rncf63d6eudADwf8+Xpuet8eep2O7mn89QTfJ3cC+xO3n77bQUFBWnp0qUKDAy0t8+ePbta64mMjFTPnj01d+5cDRw4UJ9++mm5E46drcaNG8sYo8TERPs
37c5VfHy8tmzZImOMw1xe0VnHmeu9A18vh9dYsWJFhf/V++9//ytJ9v929u3bVz4+Ppo4cWK5/zq7y38FJ0+erP/973+65ZZblJSUdMb+Y8aM0YkTJyrdI3wmAwcO1FdffaXx48fLZrPpL3/5i33Zyf+g/vm1Mcbo6aefPqvnqo6bb75Z+/fv1wsvvFBu2W+//eZwNu8HHnhAe/fu1csvv6ypU6cqISFB6enpDpcWCwkJkVT+g8jZqlevnjp37qznnnvO/mHnz069FFhVdO3aVX5+fpo5c6ZD+7PPPluur7PHA+DCwHzpmfNlTk5Ohe2nbrf4+Hj5+vrq448/dug3Y8YMp9ThTL6+vrLZbA574Xfv3q1FixZVe1233367tmzZovvvv1++vr4aMGCAU2rs16+ffH19NWHChHLve2OMfv7552qvMy0tTfv373c4h0JxcXGFn3dCQkIsPZQP5wd7uuE1hg8frl9//VU33HCDkpOTdfz4cX322Wd64403lJCQoDvvvFPSH18/e+ihh+wnIOnXr58CAwO1bt06xcbGKisryyn17NixQ4899li59tatW6tnz56SpN9//12vvfaapD/+2O7Zs0fvvfeevv76a11zzTV6/vnnq/RczZo103XXXaf//Oc/GjdunGrXrl2tWm+77TZNnDhR7777rjp27OhwDcjk5GQ1btxY9913n/bv36+wsDC9/fbb5+VYodtvv11vvvmm/v73v2vFihXq2LGjSktLtXXrVr355ptaunSp2rZtq+XLl2vGjBkaP368LrvsMkl//Je8c+fOGjdunP36mJdeeql8fX31xBNPqLCwUIGBgerSpcsZj/2eOnWqatSo4dDm4+OjBx98UNnZ2erUqZNatmypIUOGqFGjRsrPz1dubq5++OEHbdy4sVpjjoqK0ogRI/Tkk0+qd+/euvbaa7Vx40YtXrxYderUcfiP99mOB8CFjfnSM+fLPn36KDExUb169VLjxo117NgxffTRR3r//ffVrl079erVS9Ifx03fdNNNeuaZZ2Sz2dS4cWN98MEHZ33Mt5V69uypqVOn6tprr9Vf/vIXHTx4UNnZ2brooov09ddfV3tdtWvX1oIFC9SjR49qzYVneg8+9thjyszM1O7du9W3b1/VrFlTu3bt0sKFCzV06FDdd9991ar1rrvu0rPPPqtbb71VI0aMUExMjObOnWs/38Cf5/o2bdrojTfe0OjRo9WuXTuFhobatzU8yHk8UzpgqcWLF5u//vWvJjk52YSGhpqAgABz0UUXmeHDh5v8/Pxy/V966SXTunVrExgYaGrVqmWuvvpqs2zZMvvyc70EiqQKb4MHDzbG/N9lOk7eatSoYRISEkz//v3NW2+9Ve4SICdrat68eYXPuXLlynKX8qiOdu3aGUlmxowZ5ZZt2bLFpKammtDQUFOnTh0zZMgQs3HjxnKXHnH2JcOM+ePSIU888YRp3ry5fVu1adPGTJgwwRQWFpqioiITHx9vLrvsMnPixAmHx44aNcr4+PiY3Nxce9sLL7xgGjVqZHx9fc94+bCT46no5uvra++3c+dOc8cdd5jo6Gjj7+9v6tevb66//nrz1ltv2fucvKzKunXrHJ6joku7/P7772bcuHEmOjraBAcHmy5duphvv/3W1K5d2+HyK6cbT2WXQDnd+xrAhYH50jPny9dff90MGDDANG7c2AQHB5ugoCDTrFkz89BDD5mioiKHvocOHTL9+/c3NWrUMLVq1TJ33XWX2bx5c4WXDKvocmyVvX6nzi2VzW2VXXKzoud78cUXTVJSkgkMDDTJyclm9uzZFb4+kkxGRsZpX6N//OMfRpKZN2/eafudOqYzvQeNMebtt982nTp1MiEhISYkJMQkJyebjIwMs23bNnufyl639PT0cp9vvv/+e9OzZ08THBxs6tata+69917z9ttvG0lmzZo19n5Hjx41f/nLX0xERISRZF/Pyc8PCxYscFhvRZeGg+vZjHGT7wcBACpVUFCgWrVq6bHHHtNDDz3k6nIAAHA7o0aN0osvvqi8vLxy31DzBNOmTdOoUaP0ww8/qH79+q4uB07EMd0A4GZ+++23cm0nTwjTuXPn81sMAAAeoLi4WK+99pr69+/vEYH71Lm+uLhYzz33nJKSkgjcXohjugEvdOjQoUovDSL9cYmYyMhIS2soLCysMDz+WXR0tKU1eKo33nhDc+bM0XXXXafQ0FCtXr1ar7/+urp3766OHTu6ujwA8BrMl57v4MGD+uijj/TWW2/p559/1ogRI1xdUpX069dPDRs21KWXXqrCwkK99tpr2rp1q+bOnevq0mABQjfghdq1a3faS4NcffXVWrlypaU1jBgxQi+//PJp+3B0S8VatWolPz8/TZkyRUVFRfaTq1V0khcAwNljvvR8W7Zs0cCBA1WvXj1Nnz7drS9v92dpaWn6z3/+o7lz56q0tFTNmjXT/Pnzdcstt7i6NFiAY7oBL/Tpp5+e9r/mtWrVUps2bSytYcuWLTpw4MBp+zjr2tYAAJwN5ksA5wOhGwAAAAAAi3AiNQAAAAAALMIx3ZLKysp04MAB1axZ0+Fi9AAAuCtjjI4cOaLY2Fj5+Hj2/9CZhwEAnqiqczGhW9KBAwcUFxfn6jIAAKi2ffv2qUGDBq4u45wwDwMAPNmZ5mJCt6SaNWtK+uPFCgsLc3E1AACcWVFRkeLi4uxzmCdjHgYAeKKqzsWEbsn+VbawsDAmewCAR/GGr2MzDwMAPNmZ5mLPPggMAAAAAAA3RugGAAAAAMAihG4AAAAAACxC6AYAAAAAwCIuDd0ff/yxevXqpdjYWNlsNi1atMhhuTFGjzzyiGJiYhQcHKzU1FRt377doc/hw4c1cOBAhYWFKSIiQoMHD9bRo0fP4ygAAAAAAKiYS0P3sWPHdMkllyg7O7vC5VOmTNH06dM1a9YsrV27ViEhIUpLS1NxcbG9z8CBA/XNN99o2bJl+uCDD/Txxx9r6NCh52sIAAAAAABUymaMMa4uQvrjNOsLFy5U3759Jf2xlzs2Nlb33nuv7rvvPklSYWGhoqKiNGfOHA0YMEDffvutmjVrpnXr1qlt27aSpCVLlui6667TDz/8oNjY2Co9d1FRkcLDw1VYWMilSgAAHsGb5i5vGgsA4MJR1fnLbY/p3rVrl/Ly8pSammpvCw8PV/v27ZWbmytJys3NVUREhD1wS1Jqaqp8fHy0du3a814zAAAAAAB/5ufqAiqTl5cnSYqKinJoj4qKsi/Ly8tTvXr1HJb7+fkpMjLS3qciJSUlKikpsd8vKipyVtkAAAAAANi57Z5uK2VlZSk8PNx+i4uLc3VJAAAAAAAv5LahOzo6WpKUn5/v0J6fn29fFh0drYMHDzos//3333X48GF7n4pkZmaqsLDQftu3b5+TqwcAAAAAwI1Dd2JioqKjo5WTk2NvKyoq0tq1a5WSkiJJSklJUUFBgdavX2/vs3z5cpWVlal9+/aVrjswMFBhYWEONwAAAAAAnM2lx3QfPXpUO3bssN/ftWuXNmzYoMjISDVs2F
[base64 PNG data omitted: two-panel histogram of the dev split, panels \"SciTLDR_Val_Text Length\" and \"SciTLDR_Val_Summary Length\", x-axis \"Length\", y-axis \"Count\"]\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, axes = plt.subplots(1, 2, figsize=(10, 5) )\n", + "axes[0].hist(Len_SciTLDR_Val_Textt, bins = 20 )\n", + "axes[0].set_title(\"SciTLDR_Val_Text Length\")\n", + "axes[0].set_xlabel(\"Length\")\n", + "axes[0].set_ylabel(\"Count\")\n", + "\n", + "axes[1].hist(Len_SciTLDR_Val_Summaryy, bins = 20 )\n", + "axes[1].set_title(\"SciTLDR_Val_Summary Length\")\n", + "axes[1].set_xlabel(\"Length\")\n", + "plt.tight_layout()\n", + "plt.show()" + ] + },
+ { + "cell_type": "code", + "execution_count": 571, + "id": "204fa877", + "metadata": {}, + "outputs": [], + "source": [ + "# Keep dev rows whose Text length is between 50 and 350 and whose Summary length is between 15 and 130\n", + "NN_SciTLDR_dev.drop(NN_SciTLDR_dev[NN_SciTLDR_dev.Len_SciTLDR_Val_Text < 50].index, inplace=True)\n", + "NN_SciTLDR_dev.drop(NN_SciTLDR_dev[NN_SciTLDR_dev.Len_SciTLDR_Val_Text > 350].index, inplace=True)\n", + "\n", + "NN_SciTLDR_dev.drop(NN_SciTLDR_dev[NN_SciTLDR_dev.Len_SciTLDR_Val_Summary < 15].index, inplace=True)\n", + "NN_SciTLDR_dev.drop(NN_SciTLDR_dev[NN_SciTLDR_dev.Len_SciTLDR_Val_Summary > 130].index, inplace=True)\n" + ] + },
+ { + "cell_type": "code", + "execution_count": 572, + "id": "5d3e9f1f", + "metadata": {}, + "outputs": [], + "source": [ + "# Rebuild the dev length lists from the filtered dataframe\n", + "NeW_Len_SciTLDR_Val_Text = NN_SciTLDR_dev['Len_SciTLDR_Val_Text'].values.tolist()\n", + "NeW_Len_SciTLDR_Val_Summary = NN_SciTLDR_dev['Len_SciTLDR_Val_Summary'].values.tolist()\n" + ] + },
+ { + "cell_type": "code", + "execution_count": null, + "id": "6de6abb0", + "metadata": {}, + "outputs": [], + "source": [] + },
+ { + "cell_type": "code", + "execution_count": 573, + "id": "76653ed7", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "[base64 PNG data omitted: two-panel histogram of the filtered dev split, same panel titles and axes as above]\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, axes = plt.subplots(1, 2, figsize=(10, 5) )\n", + "axes[0].hist(NeW_Len_SciTLDR_Val_Text, bins = 20 )\n", + "axes[0].set_title(\"SciTLDR_Val_Text Length\")\n", + "axes[0].set_xlabel(\"Length\")\n", + "axes[0].set_ylabel(\"Count\")\n", + "\n", + "axes[1].hist(NeW_Len_SciTLDR_Val_Summary, bins = 20 )\n", + "axes[1].set_title(\"SciTLDR_Val_Summary Length\")\n", + "axes[1].set_xlabel(\"Length\")\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bb18fbcc", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "c017df6e", + "metadata": {}, + "source": [ + "# " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "60e727dc", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 574, + "id": "a1515af2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(1832, 12)" + ] + }, + "execution_count": 574, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 575, + "id": "2187e392", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['Clean_Title', 'Clean_Text', 'Clean_Summary', 'L_Clean_Text',\n", + " 'L_Clean_Summary', 'L_Clean_Title', 'T_Clean_Text', 'T_Clean_Summary',\n", + " 'Len_SciTLDR_train_Text', 'Len_SciTLDR_train_Summary', 'RangEe',\n", + " 'RangEe_TexT_SummarY', 'TexT > SummarY'],\n", + " dtype='object')" + ] + }, + "execution_count": 575, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['TexT > SummarY'] = (NN_SciTLDR_train['Len_SciTLDR_train_Text']>NN_SciTLDR_train['Len_SciTLDR_train_Summary'])\n", + "NN_SciTLDR_train.columns" + ] + }, + { + "cell_type": "code", + "execution_count": 576, + "id": "6e5f8cea", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True 1832\n", + "Name: TexT > SummarY, dtype: int64" + ] + }, + "execution_count": 576, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['TexT > SummarY'].value_counts()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "99904c33", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "980232f8", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 577, + "id": "8c32ba73", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(581, 12)" + ] + }, + "execution_count": 577, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 578, + "id": "3494d1d8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['Clean_Title', 'Clean_Text', 'Clean_Summary', 'L_Clean_Text',\n", + " 'L_Clean_Summary', 'L_Clean_Title', 'T_Clean_Text', 'T_Clean_Summary',\n", + " 'Len_SciTLDR_test_Text', 'Len_SciTLDR_test_Summary', 'RangEe',\n", + " 'RangEe_TexT_SummarY', 'TexT > SummarY'],\n", + " dtype='object')" + ] + }, + "execution_count": 578, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['TexT > SummarY'] = (NN_SciTLDR_test['Len_SciTLDR_test_Text']>NN_SciTLDR_test['Len_SciTLDR_test_Summary'])\n", + "NN_SciTLDR_test.columns" + 
] + }, + { + "cell_type": "code", + "execution_count": 579, + "id": "04df0391", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True 581\n", + "Name: TexT > SummarY, dtype: int64" + ] + }, + "execution_count": 579, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['TexT > SummarY'].value_counts()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5eec6579", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "042e1d33", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 580, + "id": "47651f8e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(578, 12)" + ] + }, + "execution_count": 580, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 581, + "id": "3442362e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['Clean_Title', 'Clean_Text', 'Clean_Summary', 'L_Clean_Text',\n", + " 'L_Clean_Summary', 'L_Clean_Title', 'T_Clean_Text', 'T_Clean_Summary',\n", + " 'Len_SciTLDR_Val_Text', 'Len_SciTLDR_Val_Summary', 'RangEe',\n", + " 'RangEe_TexT_SummarY', 'TexT > SummarY'],\n", + " dtype='object')" + ] + }, + "execution_count": 581, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['TexT > SummarY'] = (NN_SciTLDR_dev['Len_SciTLDR_Val_Text']>NN_SciTLDR_dev['Len_SciTLDR_Val_Summary'])\n", + "NN_SciTLDR_dev.columns" + ] + }, + { + "cell_type": "code", + "execution_count": 582, + "id": "e1e9bc5b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True 578\n", + "Name: TexT > SummarY, dtype: int64" + ] + }, + "execution_count": 582, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['TexT > SummarY'].value_counts()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "70e857a8", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c0562240", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 583, + "id": "72599a5d", + "metadata": {}, + "outputs": [], + "source": [ + "NN_SciTLDR_train.reset_index(drop=True, inplace=True)\n", + "NN_SciTLDR_test.reset_index(drop=True, inplace=True)\n", + "NN_SciTLDR_dev.reset_index(drop=True, inplace=True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cecf26b9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 584, + "id": "2ca71b0f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "393" + ] + }, + "execution_count": 584, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Len_SciTLDR_train_Text'].max()" + ] + }, + { + "cell_type": "code", + "execution_count": 585, + "id": "444bd908", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "50" + ] + }, + "execution_count": 585, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Len_SciTLDR_train_Text'].min()" + ] + }, + { + "cell_type": "code", + "execution_count": 586, + "id": "767c1fb7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "45" + ] + }, + "execution_count": 586, + "metadata": {}, + "output_type": 
"execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Len_SciTLDR_train_Summary'].max()" + ] + }, + { + "cell_type": "code", + "execution_count": 587, + "id": "1c5b4782", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "10" + ] + }, + "execution_count": 587, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_train['Len_SciTLDR_train_Summary'].min()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aedd99c7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 588, + "id": "c2e259c8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "334" + ] + }, + "execution_count": 588, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Len_SciTLDR_test_Text'].max()" + ] + }, + { + "cell_type": "code", + "execution_count": 589, + "id": "51ebdda3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "52" + ] + }, + "execution_count": 589, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Len_SciTLDR_test_Text'].min()" + ] + }, + { + "cell_type": "code", + "execution_count": 590, + "id": "e632974b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "137" + ] + }, + "execution_count": 590, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Len_SciTLDR_test_Summary'].max()" + ] + }, + { + "cell_type": "code", + "execution_count": 591, + "id": "2345773d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "30" + ] + }, + "execution_count": 591, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_test['Len_SciTLDR_test_Summary'].min()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bf7d513c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 592, + "id": "e2592e01", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "325" + ] + }, + "execution_count": 592, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Len_SciTLDR_Val_Text'].max()" + ] + }, + { + "cell_type": "code", + "execution_count": 593, + "id": "1edae0fb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "55" + ] + }, + "execution_count": 593, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Len_SciTLDR_Val_Text'].min()" + ] + }, + { + "cell_type": "code", + "execution_count": 594, + "id": "c8c6c3cd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "125" + ] + }, + "execution_count": 594, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Len_SciTLDR_Val_Summary'].max()" + ] + }, + { + "cell_type": "code", + "execution_count": 595, + "id": "dfc53f2c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "15" + ] + }, + "execution_count": 595, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "NN_SciTLDR_dev['Len_SciTLDR_Val_Summary'].min()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a2b5ae39", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4d058a6f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 596, + "id": 
"9947e1a4", + "metadata": {}, + "outputs": [], + "source": [ + "FN_SciTLDR_train = NN_SciTLDR_train[['Clean_Title','Clean_Text','Clean_Summary']].copy()\n", + "FN_SciTLDR_test = NN_SciTLDR_test[['Clean_Title','Clean_Text','Clean_Summary']].copy()\n", + "FN_SciTLDR_dev = NN_SciTLDR_dev[['Clean_Title','Clean_Text','Clean_Summary']].copy()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 597, + "id": "d63d326e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(1832, 3)" + ] + }, + "execution_count": 597, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "FN_SciTLDR_train.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 598, + "id": "e3cd7796", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(581, 3)" + ] + }, + "execution_count": 598, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "FN_SciTLDR_test.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 599, + "id": "5a28399a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(578, 3)" + ] + }, + "execution_count": 599, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "FN_SciTLDR_dev.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de4e231e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 600, + "id": "2b9e7cba", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_Summary
0Critical Points of Linear Neural Networks: Ana...Due to the success of deep learning to solving...We provide necessary and sufficient analytical...
1Biologically-Plausible Learning Algorithms Can...The backpropagation algorithm is often thought...Biologically plausible learning algorithms, pa...
2Logic and the 2-Simplicial TransformerWe introduce the 2-simplicial Transformer, an ...We introduce the 2-simplicial Transformer and ...
3Long-term Forecasting using Tensor-Train RNNsWe present Tensor-Train RNN, a novel family of...Accurate forecasting over very long time horiz...
4Variational Message Passing with Structured In...Recent efforts on combining deep models with p...We propose a variational message-passing algor...
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "0 Critical Points of Linear Neural Networks: Ana... \n", + "1 Biologically-Plausible Learning Algorithms Can... \n", + "2 Logic and the 2-Simplicial Transformer \n", + "3 Long-term Forecasting using Tensor-Train RNNs \n", + "4 Variational Message Passing with Structured In... \n", + "\n", + " Clean_Text \\\n", + "0 Due to the success of deep learning to solving... \n", + "1 The backpropagation algorithm is often thought... \n", + "2 We introduce the 2-simplicial Transformer, an ... \n", + "3 We present Tensor-Train RNN, a novel family of... \n", + "4 Recent efforts on combining deep models with p... \n", + "\n", + " Clean_Summary \n", + "0 We provide necessary and sufficient analytical... \n", + "1 Biologically plausible learning algorithms, pa... \n", + "2 We introduce the 2-simplicial Transformer and ... \n", + "3 Accurate forecasting over very long time horiz... \n", + "4 We propose a variational message-passing algor... " + ] + }, + "execution_count": 600, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "FN_SciTLDR_train.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 601, + "id": "e57b6ec2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_Summary
0FearNet: Brain-Inspired Model for Incremental ...Incremental class learning involves sequential...FearNet is a memory efficient neural-network, ...
1Improving Sentence Representations with Multi-...Multi-view learning can provide self-supervisi...Multi-view learning improves unsupervised sent...
2Learning objects from pixelsWe show how discrete objects can be learnt in ...We show how discrete objects can be learnt in ...
3Learning what and where to attendMost recent gains in visual recognition have o...A large-scale dataset for training attention m...
4EFFICIENT TWO-STEP ADVERSARIAL DEFENSE FOR DEE...In recent years, deep neural networks have dem...We proposed a time-efficient defense method ag...
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "0 FearNet: Brain-Inspired Model for Incremental ... \n", + "1 Improving Sentence Representations with Multi-... \n", + "2 Learning objects from pixels \n", + "3 Learning what and where to attend \n", + "4 EFFICIENT TWO-STEP ADVERSARIAL DEFENSE FOR DEE... \n", + "\n", + " Clean_Text \\\n", + "0 Incremental class learning involves sequential... \n", + "1 Multi-view learning can provide self-supervisi... \n", + "2 We show how discrete objects can be learnt in ... \n", + "3 Most recent gains in visual recognition have o... \n", + "4 In recent years, deep neural networks have dem... \n", + "\n", + " Clean_Summary \n", + "0 FearNet is a memory efficient neural-network, ... \n", + "1 Multi-view learning improves unsupervised sent... \n", + "2 We show how discrete objects can be learnt in ... \n", + "3 A large-scale dataset for training attention m... \n", + "4 We proposed a time-efficient defense method ag... " + ] + }, + "execution_count": 601, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "FN_SciTLDR_test.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 602, + "id": "d95bab8e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Clean_TitleClean_TextClean_Summary
0Adaptive Loss Scaling for Mixed Precision Trai...Mixed precision training is becoming a practic...We devise adaptive loss scaling to improve mix...
1Deep Perm-Set Net: Learn to predict sets with ...Many real-world problems, e.g. object detectio...We present a novel approach for learning to pr...
2Foveated Downsampling TechniquesFoveation is an important part of human vision...We compare object recognition performance on i...
3Training for Faster Adversarial Robustness Ver...We explore the concept of co-design in the con...We develop methods to train deep neural models...
4Towards an Adversarially Robust Normalization ...Batch Normalization has shown to be effective ...Investigation of how BatchNorm causes adversar...
\n", + "
" + ], + "text/plain": [ + " Clean_Title \\\n", + "0 Adaptive Loss Scaling for Mixed Precision Trai... \n", + "1 Deep Perm-Set Net: Learn to predict sets with ... \n", + "2 Foveated Downsampling Techniques \n", + "3 Training for Faster Adversarial Robustness Ver... \n", + "4 Towards an Adversarially Robust Normalization ... \n", + "\n", + " Clean_Text \\\n", + "0 Mixed precision training is becoming a practic... \n", + "1 Many real-world problems, e.g. object detectio... \n", + "2 Foveation is an important part of human vision... \n", + "3 We explore the concept of co-design in the con... \n", + "4 Batch Normalization has shown to be effective ... \n", + "\n", + " Clean_Summary \n", + "0 We devise adaptive loss scaling to improve mix... \n", + "1 We present a novel approach for learning to pr... \n", + "2 We compare object recognition performance on i... \n", + "3 We develop methods to train deep neural models... \n", + "4 Investigation of how BatchNorm causes adversar... " + ] + }, + "execution_count": 602, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "FN_SciTLDR_dev.head()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0964d214", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f29ae8e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 603, + "id": "571961a8", + "metadata": {}, + "outputs": [], + "source": [ + "FN_SciTLDR_train.to_csv(r'E:\\CLEaaN_DaTaSeTS\\C_END\\FN_SciTLDR_train.csv')\n", + "\n", + "FN_SciTLDR_test.to_csv(r'E:\\CLEaaN_DaTaSeTS\\C_END\\FN_SciTLDR_test.csv')\n", + "\n", + "FN_SciTLDR_dev.to_csv(r'E:\\CLEaaN_DaTaSeTS\\C_END\\FN_SciTLDR_dev.csv')\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d90db613", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3607473d", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cad509b5", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}